From d2db7bad1bed5c00cee4f05852bff1c177040bb5 Mon Sep 17 00:00:00 2001 From: sysadmin75 Date: Mon, 1 Jun 2015 13:23:28 -0400 Subject: [PATCH 0001/1113] Fixes OSX fact gathering for the bridge interface. Issue #11104 --- lib/ansible/module_utils/facts.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/lib/ansible/module_utils/facts.py b/lib/ansible/module_utils/facts.py index 1162e05b9cf..f65f776a242 100644 --- a/lib/ansible/module_utils/facts.py +++ b/lib/ansible/module_utils/facts.py @@ -2163,7 +2163,13 @@ class DarwinNetwork(GenericBsdIfconfigNetwork, Network): current_if['media'] = 'Unknown' # Mac does not give us this current_if['media_select'] = words[1] if len(words) > 2: - current_if['media_type'] = words[2][1:-1] + # MacOSX sets the media to '' for bridge interface + # and parsing splits this into two words; this if/else helps + if words[1] == '': + current_if['media_select'] = 'Unknown' + current_if['media_type'] = 'unknown type' + else: + current_if['media_type'] = words[2][1:-1] if len(words) > 3: current_if['media_options'] = self.get_options(words[3]) From 7a3519bbaa7da3166504505638d1270bd675e7f1 Mon Sep 17 00:00:00 2001 From: Quentin Stafford-Fraser Date: Tue, 21 Jul 2015 19:25:00 +0100 Subject: [PATCH 0002/1113] Documentation for inventory ignored extensions --- docsite/rst/intro_dynamic_inventory.rst | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/docsite/rst/intro_dynamic_inventory.rst b/docsite/rst/intro_dynamic_inventory.rst index 5b634d86cd9..0de7abb80fc 100644 --- a/docsite/rst/intro_dynamic_inventory.rst +++ b/docsite/rst/intro_dynamic_inventory.rst @@ -218,13 +218,21 @@ to include it in the project. .. _using_multiple_sources: -Using Multiple Inventory Sources -```````````````````````````````` +Using Inventory Directories and Multiple Inventory Sources +`````````````````````````````````````````````````````````` If the location given to -i in Ansible is a directory (or as so configured in ansible.cfg), Ansible can use multiple inventory sources at the same time. When doing so, it is possible to mix both dynamic and statically managed inventory sources in the same ansible run. Instant hybrid cloud! +In an inventory directory, executable files will be treated as dynamic inventory sources and most other files as static sources. Files which end with any of the following will be ignored:: + + ~, .orig, .bak, .ini, .retry, .pyc, .pyo + +You can replace this list with your own selection by configuring an ``inventory_ignore_extensions`` list in ansible.cfg, or setting the ANSIBLE_INVENTORY_IGNORE environment variable. The value in either case should be a comma-separated list of patterns, as shown above. + +Any ``group_vars`` and ``host_vars`` subdirectories in and inventory directory will be interpreted as expected, making inventory directories a powerful way to organize different sets of configurations. + .. 
_static_groups_of_dynamic: Static Groups of Dynamic Groups From dfd19d6bd8141447135480ac05df25d765e95772 Mon Sep 17 00:00:00 2001 From: Quentin Stafford-Fraser Date: Tue, 21 Jul 2015 19:38:49 +0100 Subject: [PATCH 0003/1113] Fix typo in docs --- docsite/rst/intro_dynamic_inventory.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docsite/rst/intro_dynamic_inventory.rst b/docsite/rst/intro_dynamic_inventory.rst index 0de7abb80fc..729a6ba5172 100644 --- a/docsite/rst/intro_dynamic_inventory.rst +++ b/docsite/rst/intro_dynamic_inventory.rst @@ -231,7 +231,7 @@ In an inventory directory, executable files will be treated as dynamic inventory You can replace this list with your own selection by configuring an ``inventory_ignore_extensions`` list in ansible.cfg, or setting the ANSIBLE_INVENTORY_IGNORE environment variable. The value in either case should be a comma-separated list of patterns, as shown above. -Any ``group_vars`` and ``host_vars`` subdirectories in and inventory directory will be interpreted as expected, making inventory directories a powerful way to organize different sets of configurations. +Any ``group_vars`` and ``host_vars`` subdirectories in an inventory directory will be interpreted as expected, making inventory directories a powerful way to organize different sets of configurations. .. _static_groups_of_dynamic: From 009d0a4bb44dfc299b2212e322a4b93e16d60a4a Mon Sep 17 00:00:00 2001 From: Andy Grimm Date: Mon, 9 Mar 2015 10:49:54 -0400 Subject: [PATCH 0004/1113] Flexible tag-based naming for ec2 hosts Introduces destination_format and destination_format_tags to allow the construction of host names based on one or more ec2 tags and a python format string. --- contrib/inventory/ec2.ini | 10 ++++++++++ contrib/inventory/ec2.py | 11 ++++++++++- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/contrib/inventory/ec2.ini b/contrib/inventory/ec2.ini index a1d9b1d805d..5bac496ff59 100644 --- a/contrib/inventory/ec2.ini +++ b/contrib/inventory/ec2.ini @@ -40,6 +40,16 @@ destination_variable = public_dns_name # vpc_destination_variable = 'private_ip_address' vpc_destination_variable = ip_address +# The following two settings allow flexible ansible host naming based on a +# python format string and a comma-separated list of ec2 tags. Note that: +# +# 1) If the tags referenced are not present for some instances, empty strings +# will be substituted in the format string. +# 2) This overrides both destination_variable and vpc_destination_variable. +# +#destination_format = {0}.{1}.example.com +#destination_format_tags = Name,environment + # To tag instances on EC2 with the resource records that point to them from # Route53, uncomment and set 'route53' to True. 
route53 = False diff --git a/contrib/inventory/ec2.py b/contrib/inventory/ec2.py index a8e042e3f4b..8c8e5e94589 100755 --- a/contrib/inventory/ec2.py +++ b/contrib/inventory/ec2.py @@ -220,6 +220,13 @@ class Ec2Inventory(object): # Destination addresses self.destination_variable = config.get('ec2', 'destination_variable') self.vpc_destination_variable = config.get('ec2', 'vpc_destination_variable') + if config.has_option('ec2', 'destination_format') and \ + config.has_option('ec2', 'destination_format_tags'): + self.destination_format = config.get('ec2', 'destination_format') + self.destination_format_tags = config.get('ec2', 'destination_format_tags').split(',') + else: + self.destination_format = None + self.destination_format_tags = None # Route53 self.route53_enabled = config.getboolean('ec2', 'route53') @@ -536,7 +543,9 @@ class Ec2Inventory(object): return # Select the best destination address - if instance.subnet_id: + if self.destination_format and self.destination_format_tags: + dest = self.destination_format.format(*[ getattr(instance, 'tags').get(tag, '') for tag in self.destination_format_tags ]) + elif instance.subnet_id: dest = getattr(instance, self.vpc_destination_variable, None) if dest is None: dest = getattr(instance, 'tags').get(self.vpc_destination_variable, None) From be452c1b2792bd15c37b2f418417eeaee4632f3e Mon Sep 17 00:00:00 2001 From: Nathaniel Cohen Date: Mon, 14 Sep 2015 14:47:44 -0700 Subject: [PATCH 0005/1113] allow ConfigureRemotingForAnsible.ps1 script to function from 'public' adapters The current script fails on machines which have network interfaces designated as connected to "Public" networks (choices for network designation being Private, Domain, Public). This commit changes the script to NOT prevent winrm initialization when device is connected to a "Public" network. --- examples/scripts/ConfigureRemotingForAnsible.ps1 | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/examples/scripts/ConfigureRemotingForAnsible.ps1 b/examples/scripts/ConfigureRemotingForAnsible.ps1 index a67ea8afb2c..2555b2e97a3 100644 --- a/examples/scripts/ConfigureRemotingForAnsible.ps1 +++ b/examples/scripts/ConfigureRemotingForAnsible.ps1 @@ -1,10 +1,10 @@ -# Configure a Windows host for remote management with Ansible +# Configure a Windows host for remote management with Ansible # ----------------------------------------------------------- # # This script checks the current WinRM/PSRemoting configuration and makes the # necessary changes to allow Ansible to connect, authenticate and execute # PowerShell commands. -# +# # Set $VerbosePreference = "Continue" before running the script in order to # see the output messages. # @@ -17,6 +17,7 @@ Param ( [string]$SubjectName = $env:COMPUTERNAME, [int]$CertValidityDays = 365, + [switch]$SkipNetworkProfileCheck, $CreateSelfSignedCert = $true ) @@ -27,7 +28,7 @@ Function New-LegacySelfSignedCert [string]$SubjectName, [int]$ValidDays = 365 ) - + $name = New-Object -COM "X509Enrollment.CX500DistinguishedName.1" $name.Encode("CN=$SubjectName", 0) @@ -97,8 +98,14 @@ ElseIf ((Get-Service "WinRM").Status -ne "Running") # WinRM should be running; check that we have a PS session config. If (!(Get-PSSessionConfiguration -Verbose:$false) -or (!(Get-ChildItem WSMan:\localhost\Listener))) { - Write-Verbose "Enabling PS Remoting." + if ($SkipNetworkProfileCheck) { + Write-Verbose "Enabling PS Remoting without checking Network profile." 
+ Enable-PSRemoting -SkipNetworkProfileCheck -Force -ErrorAction Stop + } + else { + Write-Verbose "Enabling PS Remoting" Enable-PSRemoting -Force -ErrorAction Stop + } } Else { From 8b6f8ff92898f7e1fd9b9db5d71dd1673262402d Mon Sep 17 00:00:00 2001 From: Nathaniel Cohen Date: Tue, 22 Sep 2015 11:57:15 -0700 Subject: [PATCH 0006/1113] Document -SkipNetworkProfileCheck switch --- examples/scripts/ConfigureRemotingForAnsible.ps1 | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/examples/scripts/ConfigureRemotingForAnsible.ps1 b/examples/scripts/ConfigureRemotingForAnsible.ps1 index 2555b2e97a3..bc8f3898263 100644 --- a/examples/scripts/ConfigureRemotingForAnsible.ps1 +++ b/examples/scripts/ConfigureRemotingForAnsible.ps1 @@ -7,6 +7,10 @@ # # Set $VerbosePreference = "Continue" before running the script in order to # see the output messages. +# Set $SkipNetworkProfileCheck to skip the network profile check. Without +# specifying this the script will only run if the device's interfaces are in +# DOMAIN or PRIVATE zones. Provide this switch if you want to enable winrm on +# a device with an interface in PUBLIC zone. # # Written by Trond Hindenes # Updated by Chris Church From fc0801e69bd2615c0bd254ecf2bbe58b44b81eaf Mon Sep 17 00:00:00 2001 From: Nathaniel Cohen Date: Tue, 22 Sep 2015 12:45:02 -0700 Subject: [PATCH 0007/1113] describe command line options for ConfigureAnsibleForRemoting --- docsite/rst/intro_windows.rst | 35 +++++++++++++++++++---------------- 1 file changed, 19 insertions(+), 16 deletions(-) diff --git a/docsite/rst/intro_windows.rst b/docsite/rst/intro_windows.rst index 6e3cb5bc60d..89d4cc31504 100644 --- a/docsite/rst/intro_windows.rst +++ b/docsite/rst/intro_windows.rst @@ -8,7 +8,7 @@ Windows Support Windows: How Does It Work ````````````````````````` -As you may have already read, Ansible manages Linux/Unix machines using SSH by default. +As you may have already read, Ansible manages Linux/Unix machines using SSH by default. Starting in version 1.7, Ansible also contains support for managing Windows machines. This uses native PowerShell remoting, rather than SSH. @@ -40,22 +40,22 @@ Installing python-kerberos dependencies # Via Yum yum -y install python-devel krb5-devel krb5-libs krb5-workstation - + # Via Apt (Ubuntu) sudo apt-get install python-dev libkrb5-dev - + # Via Portage (Gentoo) - emerge -av app-crypt/mit-krb5 + emerge -av app-crypt/mit-krb5 emerge -av dev-python/setuptools # Via pkg (FreeBSD) sudo pkg install security/krb5 - + # Via OpenCSW (Solaris) pkgadd -d http://get.opencsw.org/now /opt/csw/bin/pkgutil -U - /opt/csw/bin/pkgutil -y -i libkrb5_3 - + /opt/csw/bin/pkgutil -y -i libkrb5_3 + # Via Pacman (Arch Linux) pacman -S krb5 @@ -115,18 +115,23 @@ Windows System Prep In order for Ansible to manage your windows machines, you will have to enable PowerShell remoting configured. -To automate setup of WinRM, you can run `this PowerShell script `_ on the remote machine. +To automate setup of WinRM, you can run `this PowerShell script `_ on the remote machine. -Admins may wish to modify this setup slightly, for instance to increase the timeframe of -the certificate. +The example script accepts a few arguments which Admins may choose to use to modify the default setup slightly, which might be appropriate in some cases. + +Pass the -CertValidityDays option to customize the expiration date of the generated certificate. 
+ powershell.exe -File ConfigureRemotingForAnsible.ps1 -CertValidityDays 100 + +Pass the -SkipNetworkProfileCheck switch to configure winrm to listen on PUBLIC zone interfaces. (Without this option, the script will fail if any network interface on device is in PUBLIC zone) + powershell.exe -File ConfigureRemotingForAnsible.ps1 -SkipNetworkProfileCheck .. note:: - On Windows 7 and Server 2008 R2 machines, due to a bug in Windows + On Windows 7 and Server 2008 R2 machines, due to a bug in Windows Management Framework 3.0, it may be necessary to install this hotfix http://support.microsoft.com/kb/2842230 to avoid receiving out of memory and stack overflow exceptions. Newly-installed Server 2008 R2 systems which are not fully up to date with windows updates are known - to have this issue. + to have this issue. Windows 8.1 and Server 2012 R2 are not affected by this issue as they come with Windows Management Framework 4.0. @@ -145,8 +150,8 @@ Looking at an ansible checkout, copy the `examples/scripts/upgrade_to_ps3.ps1 `_. +Most of the Ansible modules in core Ansible are written for a combination of Linux/Unix machines and arbitrary web services, though there are various +Windows modules as listed in the `"windows" subcategory of the Ansible module index `_. Browse this index to see what is available. @@ -275,5 +280,3 @@ form of new modules, tweaks to existing modules, documentation, or something els Questions? Help? Ideas? Stop by the list on Google Groups `irc.freenode.net `_ #ansible IRC chat channel - - From fa332e1342ff99665504f036c82cf90f2fa04433 Mon Sep 17 00:00:00 2001 From: George Sudarkoff Date: Wed, 7 Oct 2015 10:32:50 -0700 Subject: [PATCH 0008/1113] Fail if the vault password script returns non-zero. --- lib/ansible/cli/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py index bf5e33e6be8..9391d386fb4 100644 --- a/lib/ansible/cli/__init__.py +++ b/lib/ansible/cli/__init__.py @@ -501,6 +501,8 @@ class CLI(object): except OSError as e: raise AnsibleError("Problem running vault password script %s (%s). If this is not a script, remove the executable bit from the file." % (' '.join(this_path), e)) stdout, stderr = p.communicate() + if p.returncode != 0: + raise AnsibleError("Vault password script %s returned non-zero (%s)." % (this_path, p.returncode)) vault_pass = stdout.strip('\r\n') else: try: From 00ccd2ee6e7ced5b5d7556ed466979ceeacfe3a2 Mon Sep 17 00:00:00 2001 From: Bernhard Lichtinger Date: Mon, 26 Oct 2015 14:22:21 +0100 Subject: [PATCH 0009/1113] Fix for SLES 11.4, which has now also an /etc/os-release file. 
--- lib/ansible/module_utils/facts.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/module_utils/facts.py b/lib/ansible/module_utils/facts.py index 1ba61bb77f0..4c7571ce3c5 100644 --- a/lib/ansible/module_utils/facts.py +++ b/lib/ansible/module_utils/facts.py @@ -418,9 +418,9 @@ class Facts(object): release = re.search("^PRETTY_NAME=[^(]+ \(?([^)]+?)\)", line) if release: self.facts['distribution_release'] = release.groups()[0] - elif 'enterprise' in data.lower(): + elif 'enterprise' in data.lower() and 'VERSION_ID' in line: release = re.search('^VERSION_ID="?[0-9]+\.?([0-9]*)"?', line) # SLES doesn't got funny release names - if release: + if release.group(1): release = release.group(1) else: release = "0" # no minor number, so it is the first release From 0bc32cbaeea54a0d27ab2654d4d9eb43064cf735 Mon Sep 17 00:00:00 2001 From: Florian Haas Date: Thu, 12 Nov 2015 21:19:40 +0100 Subject: [PATCH 0010/1113] Correct connection type returned by libvirt_lxc inventory script The correct connection type for LXC containers managed via libvirt is libvirt_lxc, not lxc. --- contrib/inventory/libvirt_lxc.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/contrib/inventory/libvirt_lxc.py b/contrib/inventory/libvirt_lxc.py index 1491afd577d..cb34d473cda 100755 --- a/contrib/inventory/libvirt_lxc.py +++ b/contrib/inventory/libvirt_lxc.py @@ -27,11 +27,11 @@ result['all'] = {} pipe = Popen(['virsh', '-q', '-c', 'lxc:///', 'list', '--name', '--all'], stdout=PIPE, universal_newlines=True) result['all']['hosts'] = [x[:-1] for x in pipe.stdout.readlines()] result['all']['vars'] = {} -result['all']['vars']['ansible_connection'] = 'lxc' +result['all']['vars']['ansible_connection'] = 'libvirt_lxc' if len(sys.argv) == 2 and sys.argv[1] == '--list': print(json.dumps(result)) elif len(sys.argv) == 3 and sys.argv[1] == '--host': - print(json.dumps({'ansible_connection': 'lxc'})) + print(json.dumps({'ansible_connection': 'libvirt_lxc'})) else: print("Need an argument, either --list or --host ") From 1b76a9cef2d74eba9fd786e43f1cf3364a8ac501 Mon Sep 17 00:00:00 2001 From: Jonathan Davila Date: Fri, 13 Nov 2015 18:19:09 -0700 Subject: [PATCH 0011/1113] Patch to remove dependency on boto when only using boto3 Updated with explicit check for HAS_BOTO3 --- lib/ansible/module_utils/ec2.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/lib/ansible/module_utils/ec2.py b/lib/ansible/module_utils/ec2.py index ac799772c2c..2edfd9e5d83 100644 --- a/lib/ansible/module_utils/ec2.py +++ b/lib/ansible/module_utils/ec2.py @@ -29,6 +29,7 @@ import os try: import boto3 + import botocore HAS_BOTO3 = True except: HAS_BOTO3 = False @@ -129,10 +130,14 @@ def get_aws_connection_info(module, boto3=False): elif 'EC2_REGION' in os.environ: region = os.environ['EC2_REGION'] else: - # boto.config.get returns None if config not found - region = boto.config.get('Boto', 'aws_region') - if not region: - region = boto.config.get('Boto', 'ec2_region') + if not boto3: + # boto.config.get returns None if config not found + region = boto.config.get('Boto', 'aws_region') + if not region: + region = boto.config.get('Boto', 'ec2_region') + elif boto3 and HAS_BOTO3: + # here we don't need to make an additional call, will default to 'us-east-1' if the below evaluates to None. 
+ region = botocore.session.get_session().get_config_variable('region') if not security_token: if 'AWS_SECURITY_TOKEN' in os.environ: From 9761250a4b179d2064a49e2f4b4a66ba423de26f Mon Sep 17 00:00:00 2001 From: Mick Bass Date: Sun, 27 Sep 2015 17:12:13 -0600 Subject: [PATCH 0012/1113] Allow tree-ish to be used for galaxy role version Ensure that ansible-galaxy version can be a branch, a tag, or any tree-ish supported by git including specific commit IDs. For git scm roles, adds an explicit git checkout of the specified role_version prior to the git archive. This means that we'll always archive from HEAD of whatever role_version is checked out. role_version can be a branch, a tag, or any supported by git including specific commit IDs. These changes also ensure ansible-galaxy works for scm clones when specified version differs from repository default branch. --- lib/ansible/galaxy/role.py | 2 -- lib/ansible/playbook/role/requirement.py | 11 +++++++++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/lib/ansible/galaxy/role.py b/lib/ansible/galaxy/role.py index dc9da5d79ce..5acd26c92dc 100644 --- a/lib/ansible/galaxy/role.py +++ b/lib/ansible/galaxy/role.py @@ -310,5 +310,3 @@ class GalaxyRole(object): } """ return dict(scm=self.scm, src=self.src, version=self.version, name=self.name) - - diff --git a/lib/ansible/playbook/role/requirement.py b/lib/ansible/playbook/role/requirement.py index 1a640247e25..807dd1e82fd 100644 --- a/lib/ansible/playbook/role/requirement.py +++ b/lib/ansible/playbook/role/requirement.py @@ -190,6 +190,17 @@ class RoleRequirement(RoleDefinition): if rc != 0: raise AnsibleError ("- command %s failed in directory %s (rc=%s)" % (' '.join(clone_cmd), tempdir, rc)) + if scm == 'git' and version: + checkout_cmd = [scm, 'checkout', version] + with open('/dev/null', 'w') as devnull: + try: + popen = subprocess.Popen(checkout_cmd, cwd=os.path.join(tempdir, name), stdout=devnull, stderr=devnull) + except (IOError, OSError): + raise AnsibleError("error executing: %s" % " ".join(checkout_cmd)) + rc = popen.wait() + if rc != 0: + raise AnsibleError("- command %s failed in directory %s (rc=%s)" % (' '.join(checkout_cmd), tempdir, rc)) + temp_file = tempfile.NamedTemporaryFile(delete=False, suffix='.tar') if scm == 'hg': archive_cmd = ['hg', 'archive', '--prefix', "%s/" % name] From ff19233ad33dc989e997e69b4f36cab56fae74da Mon Sep 17 00:00:00 2001 From: Will Thames Date: Thu, 22 Oct 2015 14:18:48 +1000 Subject: [PATCH 0013/1113] Add tests for #10620 --- test/integration/galaxy_roles.yml | 2 +- test/integration/galaxy_rolesfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/test/integration/galaxy_roles.yml b/test/integration/galaxy_roles.yml index 5f4373c5004..3d2121f1683 100644 --- a/test/integration/galaxy_roles.yml +++ b/test/integration/galaxy_roles.yml @@ -3,7 +3,7 @@ name: oracle_java7 - src: git+http://bitbucket.org/willthames/git-ansible-galaxy - version: v1.6 + version: pr-10620 - src: http://bitbucket.org/willthames/hg-ansible-galaxy scm: hg diff --git a/test/integration/galaxy_rolesfile b/test/integration/galaxy_rolesfile index b78cdc11481..047eef95502 100644 --- a/test/integration/galaxy_rolesfile +++ b/test/integration/galaxy_rolesfile @@ -1,7 +1,7 @@ # deliberate non-empty whitespace line to follow -git+https://bitbucket.org/willthames/git-ansible-galaxy,v1.6 +git+https://bitbucket.org/willthames/git-ansible-galaxy,pr-10620 hg+https://bitbucket.org/willthames/hg-ansible-galaxy 
https://bitbucket.org/willthames/http-ansible-galaxy/get/master.tar.gz,,http-role # comment From 6d6d4f0c8e2b7d9a6883780d35cb56fedb8b2224 Mon Sep 17 00:00:00 2001 From: Arata Notsu Date: Tue, 1 Dec 2015 23:47:22 +0900 Subject: [PATCH 0014/1113] BOOLEAN should contain boolean literals It is natural that an argument_spec with choises=BOOLEAN accepts boolean literal (True, False) though the current implementation allows only string or int. --- lib/ansible/module_utils/basic.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py index d2cf09458ea..95857339539 100644 --- a/lib/ansible/module_utils/basic.py +++ b/lib/ansible/module_utils/basic.py @@ -34,8 +34,8 @@ ANSIBLE_VERSION = "<>" MODULE_ARGS = "<>" MODULE_COMPLEX_ARGS = "<>" -BOOLEANS_TRUE = ['yes', 'on', '1', 'true', 1] -BOOLEANS_FALSE = ['no', 'off', '0', 'false', 0] +BOOLEANS_TRUE = ['yes', 'on', '1', 'true', 1, True] +BOOLEANS_FALSE = ['no', 'off', '0', 'false', 0, False] BOOLEANS = BOOLEANS_TRUE + BOOLEANS_FALSE SELINUX_SPECIAL_FS="<>" From 7724c958e149e1b9dc1021936b31956792013e38 Mon Sep 17 00:00:00 2001 From: Ming Qian Date: Tue, 1 Dec 2015 11:24:17 -0800 Subject: [PATCH 0015/1113] Update intro_windows.rst first pull. thanks. --- docsite/rst/intro_windows.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docsite/rst/intro_windows.rst b/docsite/rst/intro_windows.rst index e5cbb94fafd..316d1eca1ac 100644 --- a/docsite/rst/intro_windows.rst +++ b/docsite/rst/intro_windows.rst @@ -166,6 +166,8 @@ In group_vars/windows.yml, define the following inventory variables:: ansible_port: 5986 ansible_connection: winrm +Attention for the older style variables (``ansible_ssh_*``): ansible_ssh_password doesn't exist, should be ansible_ssh_pass. + Although Ansible is mostly an SSH-oriented system, Windows management will not happen over SSH (`yet `). If you have installed the ``kerberos`` module and ``ansible_user`` contains ``@`` (e.g. ``username@realm``), Ansible will first attempt Kerberos authentication. *This method uses the principal you are authenticated to Kerberos with on the control machine and not ``ansible_user``*. If that fails, either because you are not signed into Kerberos on the control machine or because the corresponding domain account on the remote host is not available, then Ansible will fall back to "plain" username/password authentication. From 29f5c5db7178b3bb26f4dd8410269a44d17e5315 Mon Sep 17 00:00:00 2001 From: Peter Sprygada Date: Thu, 3 Dec 2015 12:50:23 -0500 Subject: [PATCH 0016/1113] bugfix for ios.py shared module argument creation This patch fixes a bug in module_utils/ios.py where the the wrong shared module arguments are being generated. This bug prevented the shared module from operating correctly. This patch should be generally applied. 
--- lib/ansible/module_utils/ios.py | 21 +++------------------ 1 file changed, 3 insertions(+), 18 deletions(-) diff --git a/lib/ansible/module_utils/ios.py b/lib/ansible/module_utils/ios.py index dc46a860c6a..085b68dcd28 100644 --- a/lib/ansible/module_utils/ios.py +++ b/lib/ansible/module_utils/ios.py @@ -80,7 +80,7 @@ def ios_module(**kwargs): """ spec = kwargs.get('argument_spec') or dict() - argument_spec = url_argument_spec() + argument_spec = shell_argument_spec() argument_spec.update(IOS_COMMON_ARGS) if kwargs.get('argument_spec'): argument_spec.update(kwargs['argument_spec']) @@ -150,21 +150,6 @@ class IosShell(object): responses.append(response) return responses -def ios_from_args(module): - """Extracts the set of argumetns to build a valid IOS connection - """ - params = dict() - for arg, attrs in IOS_COMMON_ARGS.iteritems(): - if module.params['device']: - params[arg] = module.params['device'].get(arg) - if arg not in params or module.params[arg]: - params[arg] = module.params[arg] - if params[arg] is None: - if attrs.get('required'): - module.fail_json(msg='argument %s is required' % arg) - params[arg] = attrs.get('default') - return params - def ios_connection(module): """Creates a connection to an IOS device based on the module arguments """ @@ -180,16 +165,16 @@ def ios_connection(module): shell = IosShell() shell.connect(host, port=port, username=username, password=password, timeout=timeout) + shell.send('terminal length 0') except paramiko.ssh_exception.AuthenticationException, exc: module.fail_json(msg=exc.message) except socket.error, exc: module.fail_json(msg=exc.strerror, errno=exc.errno) - shell.send('terminal length 0') - if module.params['enable_mode']: shell.authorize(module.params['enable_password']) return shell + From a1f516824ee2160121437edf6939ab2145972739 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Thu, 3 Dec 2015 18:23:08 -0800 Subject: [PATCH 0017/1113] corrected playbook path, reformated options help the last just to make the help consistent and readable --- lib/ansible/cli/pull.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/lib/ansible/cli/pull.py b/lib/ansible/cli/pull.py index 04586c1d0c5..9cc6c25e9f2 100644 --- a/lib/ansible/cli/pull.py +++ b/lib/ansible/cli/pull.py @@ -74,8 +74,10 @@ class PullCLI(CLI): help='sleep for random interval (between 0 and n number of seconds) before starting. This is a useful way to disperse git requests') self.parser.add_option('-f', '--force', dest='force', default=False, action='store_true', help='run the playbook even if the repository could not be updated') - self.parser.add_option('-d', '--directory', dest='dest', default='~/.ansible/pull', help='directory to checkout repository to') - self.parser.add_option('-U', '--url', dest='url', default=None, help='URL of the playbook repository') + self.parser.add_option('-d', '--directory', dest='dest', default='~/.ansible/pull', + help='directory to checkout repository to') + self.parser.add_option('-U', '--url', dest='url', default=None, + help='URL of the playbook repository') self.parser.add_option('-C', '--checkout', dest='checkout', help='branch/tag/commit to checkout. 
' 'Defaults to behavior of repository module.') self.parser.add_option('--accept-host-key', default=False, dest='accept_host_key', action='store_true', @@ -174,8 +176,7 @@ class PullCLI(CLI): display.display("Repository has not changed, quitting.") return 0 - playbook = self.select_playbook(path) - + playbook = self.select_playbook(self.options.dest) if playbook is None: raise AnsibleOptionsError("Could not find a playbook to run.") From 8d5f36a6c23ad17116ee0bb24c07f83745efb8e0 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Thu, 3 Dec 2015 19:39:57 -0800 Subject: [PATCH 0018/1113] return unique list of hosts --- lib/ansible/inventory/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/inventory/__init__.py b/lib/ansible/inventory/__init__.py index fdcbd37e78e..59a3c37bf93 100644 --- a/lib/ansible/inventory/__init__.py +++ b/lib/ansible/inventory/__init__.py @@ -196,7 +196,7 @@ class Inventory(object): hosts = [ h for h in hosts if h in self._restriction ] HOSTS_PATTERNS_CACHE[pattern_hash] = hosts[:] - return hosts + return list(set(hosts)) @classmethod def split_host_pattern(cls, pattern): From e1c62fb5afd5344dc1f3ff1606803263218b79ea Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Thu, 3 Dec 2015 19:42:05 -0800 Subject: [PATCH 0019/1113] reverted to previous pull checkout dir behaviour This fixes bugs with not finding plays when not specifying checkout dir Also makes it backwards compatible --- lib/ansible/cli/pull.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/ansible/cli/pull.py b/lib/ansible/cli/pull.py index 9cc6c25e9f2..b2e402126da 100644 --- a/lib/ansible/cli/pull.py +++ b/lib/ansible/cli/pull.py @@ -74,7 +74,7 @@ class PullCLI(CLI): help='sleep for random interval (between 0 and n number of seconds) before starting. 
This is a useful way to disperse git requests') self.parser.add_option('-f', '--force', dest='force', default=False, action='store_true', help='run the playbook even if the repository could not be updated') - self.parser.add_option('-d', '--directory', dest='dest', default='~/.ansible/pull', + self.parser.add_option('-d', '--directory', dest='dest', default=None, help='directory to checkout repository to') self.parser.add_option('-U', '--url', dest='url', default=None, help='URL of the playbook repository') @@ -90,6 +90,11 @@ class PullCLI(CLI): self.options, self.args = self.parser.parse_args() + if not self.options.dest: + hostname = socket.getfqdn() + # use a hostname dependent directory, in case of $HOME on nfs + self.options.dest = os.path.join('~/.ansible/pull', hostname) + if self.options.sleep: try: secs = random.randint(0,int(self.options.sleep)) From d5446f98046d379ec950b849317472982dcba757 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Thu, 3 Dec 2015 20:47:02 -0800 Subject: [PATCH 0020/1113] fixed ansible-pull broken options * sudo was not working, now it supports full become * now default checkout dir works, not only when specifying * paths for checkout dir get expanded * fixed limit options for playbook * added verbose and debug info --- lib/ansible/cli/__init__.py | 12 +++++++----- lib/ansible/cli/pull.py | 25 ++++++++++++++++--------- 2 files changed, 23 insertions(+), 14 deletions(-) diff --git a/lib/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py index da4d1b92d3d..da1aabcc698 100644 --- a/lib/ansible/cli/__init__.py +++ b/lib/ansible/cli/__init__.py @@ -210,7 +210,7 @@ class CLI(object): @staticmethod def base_parser(usage="", output_opts=False, runas_opts=False, meta_opts=False, runtask_opts=False, vault_opts=False, module_opts=False, - async_opts=False, connect_opts=False, subset_opts=False, check_opts=False, inventory_opts=False, epilog=None, fork_opts=False): + async_opts=False, connect_opts=False, subset_opts=False, check_opts=False, inventory_opts=False, epilog=None, fork_opts=False, runas_prompt_opts=False): ''' create an options parser for most ansible scripts ''' # TODO: implement epilog parsing @@ -267,10 +267,6 @@ class CLI(object): if runas_opts: # priv user defaults to root later on to enable detecting when this option was given here - parser.add_option('-K', '--ask-sudo-pass', default=C.DEFAULT_ASK_SUDO_PASS, dest='ask_sudo_pass', action='store_true', - help='ask for sudo password (deprecated, use become)') - parser.add_option('--ask-su-pass', default=C.DEFAULT_ASK_SU_PASS, dest='ask_su_pass', action='store_true', - help='ask for su password (deprecated, use become)') parser.add_option("-s", "--sudo", default=C.DEFAULT_SUDO, action="store_true", dest='sudo', help="run operations with sudo (nopasswd) (deprecated, use become)") parser.add_option('-U', '--sudo-user', dest='sudo_user', default=None, @@ -287,6 +283,12 @@ class CLI(object): help="privilege escalation method to use (default=%s), valid choices: [ %s ]" % (C.DEFAULT_BECOME_METHOD, ' | '.join(C.BECOME_METHODS))) parser.add_option('--become-user', default=None, dest='become_user', type='string', help='run operations as this user (default=%s)' % C.DEFAULT_BECOME_USER) + + if runas_opts or runas_prompt_opts: + parser.add_option('-K', '--ask-sudo-pass', default=C.DEFAULT_ASK_SUDO_PASS, dest='ask_sudo_pass', action='store_true', + help='ask for sudo password (deprecated, use become)') + parser.add_option('--ask-su-pass', default=C.DEFAULT_ASK_SU_PASS, dest='ask_su_pass', action='store_true', + 
help='ask for su password (deprecated, use become)') parser.add_option('--ask-become-pass', default=False, dest='become_ask_pass', action='store_true', help='ask for privilege escalation password') diff --git a/lib/ansible/cli/pull.py b/lib/ansible/cli/pull.py index b2e402126da..1543c704d57 100644 --- a/lib/ansible/cli/pull.py +++ b/lib/ansible/cli/pull.py @@ -64,10 +64,12 @@ class PullCLI(CLI): subset_opts=True, inventory_opts=True, module_opts=True, + runas_prompt_opts=True, ) # options unique to pull - self.parser.add_option('--purge', default=False, action='store_true', help='purge checkout after playbook run') + self.parser.add_option('--purge', default=False, action='store_true', + help='purge checkout after playbook run') self.parser.add_option('-o', '--only-if-changed', dest='ifchanged', default=False, action='store_true', help='only run the playbook if the repository has been updated') self.parser.add_option('-s', '--sleep', dest='sleep', default=None, @@ -94,6 +96,7 @@ class PullCLI(CLI): hostname = socket.getfqdn() # use a hostname dependent directory, in case of $HOME on nfs self.options.dest = os.path.join('~/.ansible/pull', hostname) + self.options.dest = os.path.expandvars(os.path.expanduser(self.options.dest)) if self.options.sleep: try: @@ -126,7 +129,7 @@ class PullCLI(CLI): node = platform.node() host = socket.getfqdn() limit_opts = 'localhost,%s,127.0.0.1' % ','.join(set([host, node, host.split('.')[0], node.split('.')[0]])) - base_opts = '-c local "%s"' % limit_opts + base_opts = '-c local ' if self.options.verbosity > 0: base_opts += ' -%s' % ''.join([ "v" for x in range(0, self.options.verbosity) ]) @@ -137,7 +140,7 @@ class PullCLI(CLI): else: inv_opts = self.options.inventory - #TODO: enable more repo modules hg/svn? + #FIXME: enable more repo modules hg/svn? 
if self.options.module_name == 'git': repo_opts = "name=%s dest=%s" % (self.options.url, self.options.dest) if self.options.checkout: @@ -157,8 +160,8 @@ class PullCLI(CLI): raise AnsibleOptionsError(("module '%s' not found.\n" % self.options.module_name)) bin_path = os.path.dirname(os.path.abspath(sys.argv[0])) - cmd = '%s/ansible -i "%s" %s -m %s -a "%s"' % ( - bin_path, inv_opts, base_opts, self.options.module_name, repo_opts + cmd = '%s/ansible -i "%s" %s -m %s -a "%s" "%s"' % ( + bin_path, inv_opts, base_opts, self.options.module_name, repo_opts, limit_opts ) for ev in self.options.extra_vars: @@ -170,6 +173,8 @@ class PullCLI(CLI): time.sleep(self.options.sleep) # RUN the Checkout command + display.debug("running ansible with VCS module to checkout repo") + display.vvvv('EXEC: %s' % cmd) rc, out, err = run_cmd(cmd, live=True) if rc != 0: @@ -193,16 +198,18 @@ class PullCLI(CLI): cmd += ' -i "%s"' % self.options.inventory for ev in self.options.extra_vars: cmd += ' -e "%s"' % ev - if self.options.ask_sudo_pass: - cmd += ' -K' + if self.options.ask_sudo_pass or self.options.ask_su_pass or self.options.become_ask_pass: + cmd += ' --ask-become-pass' if self.options.tags: cmd += ' -t "%s"' % self.options.tags - if self.options.limit: - cmd += ' -l "%s"' % self.options.limit + if self.options.subset: + cmd += ' -l "%s"' % self.options.subset os.chdir(self.options.dest) # RUN THE PLAYBOOK COMMAND + display.debug("running ansible-playbook to do actual work") + display.debug('EXEC: %s' % cmd) rc, out, err = run_cmd(cmd, live=True) if self.options.purge: From e385c91fa528cb5e835077331512307b231ba393 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Fri, 4 Dec 2015 09:57:06 -0800 Subject: [PATCH 0021/1113] Update submodule refs# --- lib/ansible/modules/core | 2 +- lib/ansible/modules/extras | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core index cd9a7667aa3..191347676ee 160000 --- a/lib/ansible/modules/core +++ b/lib/ansible/modules/core @@ -1 +1 @@ -Subproject commit cd9a7667aa39bbc1ccd606ebebaf3c62f228d601 +Subproject commit 191347676eea08817da3fb237f24cdbf2d16e307 diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras index 3c4f954f0fe..a10bdd6be94 160000 --- a/lib/ansible/modules/extras +++ b/lib/ansible/modules/extras @@ -1 +1 @@ -Subproject commit 3c4f954f0fece5dcb3241d6d5391273334206241 +Subproject commit a10bdd6be948d3aa5fad7ff4959908d6e78e0528 From 750adbaa270bca5a63f443808a7b8ddc2a026d9a Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Fri, 4 Dec 2015 12:48:56 -0500 Subject: [PATCH 0022/1113] Changing up how host (in)equality is checked Fixes #13397 --- lib/ansible/inventory/dir.py | 2 +- lib/ansible/inventory/host.py | 2 +- test/units/inventory/test_host.py | 4 +--- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/lib/ansible/inventory/dir.py b/lib/ansible/inventory/dir.py index e4f7ee80f92..e716987fd5f 100644 --- a/lib/ansible/inventory/dir.py +++ b/lib/ansible/inventory/dir.py @@ -205,7 +205,7 @@ class InventoryDirectory(object): # because the __eq__/__ne__ methods in Host() compare the # name fields rather than references, we use id() here to # do the object comparison for merges - if id(self.hosts[host.name]) != id(host): + if self.hosts[host.name] != host: # different object, merge self._merge_hosts(self.hosts[host.name], host) diff --git a/lib/ansible/inventory/host.py b/lib/ansible/inventory/host.py index a561b951b45..a433463fa1b 100644 --- a/lib/ansible/inventory/host.py 
+++ b/lib/ansible/inventory/host.py @@ -38,7 +38,7 @@ class Host: def __eq__(self, other): if not isinstance(other, Host): return False - return self.name == other.name + return id(self) == id(other) def __ne__(self, other): return not self.__eq__(other) diff --git a/test/units/inventory/test_host.py b/test/units/inventory/test_host.py index 078d4321b57..5c0945f7b4e 100644 --- a/test/units/inventory/test_host.py +++ b/test/units/inventory/test_host.py @@ -29,9 +29,7 @@ class TestHost(unittest.TestCase): def test_equality(self): self.assertEqual(self.hostA, self.hostA) self.assertNotEqual(self.hostA, self.hostB) - self.assertEqual(self.hostA, Host('a')) - # __ne__ is a separate method - self.assertFalse(self.hostA != Host('a')) + self.assertNotEqual(self.hostA, Host('a')) def test_hashability(self): # equality implies the hash values are the same From 84507aedd4b4a4be48acf9657b90bb341c3bd1e2 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Fri, 4 Dec 2015 13:33:27 -0500 Subject: [PATCH 0023/1113] Adding a uuid field so we can track host equality across serialization too --- lib/ansible/inventory/host.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/ansible/inventory/host.py b/lib/ansible/inventory/host.py index a433463fa1b..6263dcbc80d 100644 --- a/lib/ansible/inventory/host.py +++ b/lib/ansible/inventory/host.py @@ -19,6 +19,8 @@ from __future__ import (absolute_import, division, print_function) __metaclass__ = type +import uuid + from ansible.inventory.group import Group from ansible.utils.vars import combine_vars @@ -38,7 +40,7 @@ class Host: def __eq__(self, other): if not isinstance(other, Host): return False - return id(self) == id(other) + return self._uuid == other._uuid def __ne__(self, other): return not self.__eq__(other) @@ -55,6 +57,7 @@ class Host: name=self.name, vars=self.vars.copy(), address=self.address, + uuid=self._uuid, gathered_facts=self._gathered_facts, groups=groups, ) @@ -65,6 +68,7 @@ class Host: self.name = data.get('name') self.vars = data.get('vars', dict()) self.address = data.get('address', '') + self._uuid = data.get('uuid', uuid.uuid4()) groups = data.get('groups', []) for group_data in groups: @@ -84,6 +88,7 @@ class Host: self.set_variable('ansible_port', int(port)) self._gathered_facts = False + self._uuid = uuid.uuid4() def __repr__(self): return self.get_name() From 0434644d12c64918d5182a7c0b0057687b1cdbc2 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Fri, 4 Dec 2015 11:50:39 -0800 Subject: [PATCH 0024/1113] Transform exceptions into ansible messages via to_unicode instead of str to avoid tracebacks. 
Fixes #13385 --- lib/ansible/executor/task_executor.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py index 4a7d7464ef8..5d7430fad25 100644 --- a/lib/ansible/executor/task_executor.py +++ b/lib/ansible/executor/task_executor.py @@ -146,7 +146,7 @@ class TaskExecutor: except AttributeError: pass except Exception as e: - display.debug("error closing connection: %s" % to_unicode(e)) + display.debug(u"error closing connection: %s" % to_unicode(e)) def _get_loop_items(self): ''' @@ -183,7 +183,7 @@ class TaskExecutor: loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, templar=templar, loader=self._loader, fail_on_undefined=True, convert_bare=True) except AnsibleUndefinedVariable as e: - if 'has no attribute' in str(e): + if u'has no attribute' in to_unicode(e): loop_terms = [] display.deprecated("Skipping task due to undefined attribute, in the future this will be a fatal error.") else: @@ -231,7 +231,7 @@ class TaskExecutor: tmp_task = self._task.copy() tmp_play_context = self._play_context.copy() except AnsibleParserError as e: - results.append(dict(failed=True, msg=str(e))) + results.append(dict(failed=True, msg=to_unicode(e))) continue # now we swap the internal task and play context with their copies, @@ -401,7 +401,7 @@ class TaskExecutor: try: result = self._handler.run(task_vars=variables) except AnsibleConnectionFailure as e: - return dict(unreachable=True, msg=str(e)) + return dict(unreachable=True, msg=to_unicode(e)) display.debug("handler run complete") if self._task.async > 0: @@ -412,7 +412,7 @@ class TaskExecutor: return result result = json.loads(result.get('stdout')) except (TypeError, ValueError) as e: - return dict(failed=True, msg="The async task did not return valid JSON: %s" % str(e)) + return dict(failed=True, msg=u"The async task did not return valid JSON: %s" % to_unicode(e)) if self._task.poll > 0: result = self._poll_async_result(result=result, templar=templar) From e8954e556a6f36e0eaeb8160bc04171ed655c43f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CBrice?= Date: Fri, 4 Dec 2015 16:24:19 -0500 Subject: [PATCH 0025/1113] comment examples in default hosts file --- examples/hosts | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/examples/hosts b/examples/hosts index ce4cbb7caa4..841f4bc6500 100644 --- a/examples/hosts +++ b/examples/hosts @@ -10,35 +10,35 @@ # Ex 1: Ungrouped hosts, specify before any group headers. 
-green.example.com -blue.example.com -192.168.100.1 -192.168.100.10 +## green.example.com +## blue.example.com +## 192.168.100.1 +## 192.168.100.10 # Ex 2: A collection of hosts belonging to the 'webservers' group -[webservers] -alpha.example.org -beta.example.org -192.168.1.100 -192.168.1.110 +## [webservers] +## alpha.example.org +## beta.example.org +## 192.168.1.100 +## 192.168.1.110 # If you have multiple hosts following a pattern you can specify # them like this: -www[001:006].example.com +## www[001:006].example.com # Ex 3: A collection of database servers in the 'dbservers' group -[dbservers] - -db01.intranet.mydomain.net -db02.intranet.mydomain.net -10.25.1.56 -10.25.1.57 +## [dbservers] +## +## db01.intranet.mydomain.net +## db02.intranet.mydomain.net +## 10.25.1.56 +## 10.25.1.57 # Here's another example of host ranges, this time there are no # leading 0s: -db-[99:101]-node.example.com +## db-[99:101]-node.example.com From 1eb0a1ddf7cf2f9501ea48915307652e8ab55049 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Fri, 4 Dec 2015 15:16:02 -0800 Subject: [PATCH 0026/1113] Correct VERSION in the devel branch --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 879b416e609..7ec1d6db408 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -2.1 +2.1.0 From a96a879fcf8c80ee37ff3898f729d7baeac1cd6f Mon Sep 17 00:00:00 2001 From: sam-at-github Date: Sat, 5 Dec 2015 13:06:58 +1100 Subject: [PATCH 0027/1113] Add fullstop to make sentence make sense. Touch parargraph while at it. --- docsite/rst/playbooks_variables.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docsite/rst/playbooks_variables.rst b/docsite/rst/playbooks_variables.rst index 18f1e57f728..307387a72e5 100644 --- a/docsite/rst/playbooks_variables.rst +++ b/docsite/rst/playbooks_variables.rst @@ -793,8 +793,8 @@ Basically, anything that goes into "role defaults" (the defaults folder inside t .. rubric:: Footnotes -.. [1] Tasks in each role will see their own role's defaults tasks outside of roles will the last role's defaults -.. [2] Variables defined in inventory file or provided by dynamic inventory +.. [1] Tasks in each role will see their own role's defaults. Tasks defined outside of a role will see the last role's defaults. +.. [2] Variables defined in inventory file or provided by dynamic inventory. .. note:: Within a any section, redefining a var will overwrite the previous instance. If multiple groups have the same variable, the last one loaded wins. From fa71c38c2a7332ed450464e9239aac6e6698b095 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Sat, 5 Dec 2015 01:47:35 -0500 Subject: [PATCH 0028/1113] updated pull location in changelog it was in between of backslash description and example --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f9f8b4b76a9..d246be10933 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -37,9 +37,9 @@ Ansible Changes By Release * New ssh configuration variables(`ansible_ssh_common_args`, `ansible_ssh_extra_args`) can be used to configure a per-group or per-host ssh ProxyCommand or set any other ssh options. `ansible_ssh_extra_args` is used to set options that are accepted only by ssh (not sftp or scp, which have their own analogous settings). +* ansible-pull can now verify the code it runs when using git as a source repository, using git's code signing and verification features. 
* Backslashes used when specifying parameters in jinja2 expressions in YAML dicts sometimes needed to be escaped twice. This has been fixed so that escaping once works. Here's an example of how playbooks need to be modified: -* ansible-pull can now verify the code it runs when using git as a source repository, using git's code signing and verification features. ``` # Syntax in 1.9.x From 0129fb0a44080d324d110c3d5c5223ab2aa138b2 Mon Sep 17 00:00:00 2001 From: Nils Steinger Date: Sat, 5 Dec 2015 15:28:37 +0100 Subject: [PATCH 0029/1113] Remove duplicates from host list *before* caching it MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Ansible previously added hosts to the host list multiple times for commands like `ansible -i 'localhost,' -c local -m ping 'localhost,localhost' --list-hosts`. 8d5f36a fixed the obvious error, but still added the un-deduplicated list to a cache, so all future invocations of get_hosts() would retrieve a non-deduplicated list. This caused problems down the line: For some reason, Ansible only ever schedules "flush_handlers" tasks (instead of scheduling any actual tasks from the playbook) for hosts that are contained in the host lists multiple times. This probably happens because the host states are stored in a dictionary indexed by the hostnames, so duplicate hostname would cause the state to be overwritten by subsequent invocations of … something. --- lib/ansible/inventory/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/inventory/__init__.py b/lib/ansible/inventory/__init__.py index 59a3c37bf93..14cd169265b 100644 --- a/lib/ansible/inventory/__init__.py +++ b/lib/ansible/inventory/__init__.py @@ -195,8 +195,8 @@ class Inventory(object): if self._restriction is not None: hosts = [ h for h in hosts if h in self._restriction ] - HOSTS_PATTERNS_CACHE[pattern_hash] = hosts[:] - return list(set(hosts)) + HOSTS_PATTERNS_CACHE[pattern_hash] = list(set(hosts)) + return HOSTS_PATTERNS_CACHE[pattern_hash][:] @classmethod def split_host_pattern(cls, pattern): From a1f6d17e37b059aa9d34a004b0aed05a6b8fa3b3 Mon Sep 17 00:00:00 2001 From: Nils Steinger Date: Sat, 5 Dec 2015 15:40:49 +0100 Subject: [PATCH 0030/1113] More meaningful string representation for meta tasks (like 'noop' and 'flush_handlers') --- lib/ansible/playbook/task.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/ansible/playbook/task.py b/lib/ansible/playbook/task.py index 4f326b628bc..21dbc87becf 100644 --- a/lib/ansible/playbook/task.py +++ b/lib/ansible/playbook/task.py @@ -133,7 +133,10 @@ class Task(Base, Conditional, Taggable, Become): def __repr__(self): ''' returns a human readable representation of the task ''' - return "TASK: %s" % self.get_name() + if self.get_name() == 'meta ': + return "TASK: meta (%s)" % self.args['_raw_params'] + else: + return "TASK: %s" % self.get_name() def _preprocess_loop(self, ds, new_ds, k, v): ''' take a lookup plugin name and store it correctly ''' From f89f906f87c2c4d850702404f70cfabaa63be351 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Sat, 5 Dec 2015 10:10:25 -0500 Subject: [PATCH 0031/1113] simplified get_hosts code to have 1 retrun point --- lib/ansible/inventory/__init__.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/lib/ansible/inventory/__init__.py b/lib/ansible/inventory/__init__.py index 14cd169265b..d7d0f03fb1f 100644 --- a/lib/ansible/inventory/__init__.py +++ 
b/lib/ansible/inventory/__init__.py @@ -178,24 +178,24 @@ class Inventory(object): if self._restriction: pattern_hash += u":%s" % to_unicode(self._restriction) - if pattern_hash in HOSTS_PATTERNS_CACHE: - return HOSTS_PATTERNS_CACHE[pattern_hash][:] + if pattern_hash not in HOSTS_PATTERNS_CACHE: - patterns = Inventory.split_host_pattern(pattern) - hosts = self._evaluate_patterns(patterns) + patterns = Inventory.split_host_pattern(pattern) + hosts = self._evaluate_patterns(patterns) - # mainly useful for hostvars[host] access - if not ignore_limits_and_restrictions: - # exclude hosts not in a subset, if defined - if self._subset: - subset = self._evaluate_patterns(self._subset) - hosts = [ h for h in hosts if h in subset ] + # mainly useful for hostvars[host] access + if not ignore_limits_and_restrictions: + # exclude hosts not in a subset, if defined + if self._subset: + subset = self._evaluate_patterns(self._subset) + hosts = [ h for h in hosts if h in subset ] + + # exclude hosts mentioned in any restriction (ex: failed hosts) + if self._restriction is not None: + hosts = [ h for h in hosts if h in self._restriction ] - # exclude hosts mentioned in any restriction (ex: failed hosts) - if self._restriction is not None: - hosts = [ h for h in hosts if h in self._restriction ] + HOSTS_PATTERNS_CACHE[pattern_hash] = list(set(hosts)) - HOSTS_PATTERNS_CACHE[pattern_hash] = list(set(hosts)) return HOSTS_PATTERNS_CACHE[pattern_hash][:] @classmethod From 8ea45e8608fc15e07493b11ce28fe3d3f38865b8 Mon Sep 17 00:00:00 2001 From: Luca Berruti Date: Sat, 5 Dec 2015 19:43:02 +0100 Subject: [PATCH 0032/1113] Make no_target_syslog consistent. no_target_syslog = False --> do log on target --- examples/ansible.cfg | 2 +- lib/ansible/constants.py | 2 +- lib/ansible/plugins/action/__init__.py | 2 +- lib/ansible/plugins/action/async.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/examples/ansible.cfg b/examples/ansible.cfg index 74aef7a0246..87c089f45ae 100644 --- a/examples/ansible.cfg +++ b/examples/ansible.cfg @@ -182,7 +182,7 @@ #no_log = False # prevents logging of tasks, but only on the targets, data is still logged on the master/controller -#no_target_syslog = True +#no_target_syslog = False # controls the compression level of variables sent to # worker processes. 
At the default of 0, no compression diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py index 08d522fcb60..6faae928dbe 100644 --- a/lib/ansible/constants.py +++ b/lib/ansible/constants.py @@ -159,7 +159,7 @@ DEFAULT_VAR_COMPRESSION_LEVEL = get_config(p, DEFAULTS, 'var_compression_level', # disclosure DEFAULT_NO_LOG = get_config(p, DEFAULTS, 'no_log', 'ANSIBLE_NO_LOG', False, boolean=True) -DEFAULT_NO_TARGET_SYSLOG = get_config(p, DEFAULTS, 'no_target_syslog', 'ANSIBLE_NO_TARGET_SYSLOG', True, boolean=True) +DEFAULT_NO_TARGET_SYSLOG = get_config(p, DEFAULTS, 'no_target_syslog', 'ANSIBLE_NO_TARGET_SYSLOG', False, boolean=True) # selinux DEFAULT_SELINUX_SPECIAL_FS = get_config(p, 'selinux', 'special_context_filesystems', None, 'fuse, nfs, vboxsf, ramfs', islist=True) diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py index 64a3b51e5d3..497143224a7 100644 --- a/lib/ansible/plugins/action/__init__.py +++ b/lib/ansible/plugins/action/__init__.py @@ -382,7 +382,7 @@ class ActionBase(with_metaclass(ABCMeta, object)): module_args['_ansible_check_mode'] = True # set no log in the module arguments, if required - if self._play_context.no_log or not C.DEFAULT_NO_TARGET_SYSLOG: + if self._play_context.no_log or C.DEFAULT_NO_TARGET_SYSLOG: module_args['_ansible_no_log'] = True # set debug in the module arguments, if required diff --git a/lib/ansible/plugins/action/async.py b/lib/ansible/plugins/action/async.py index 51e2413af27..8a7175aeb86 100644 --- a/lib/ansible/plugins/action/async.py +++ b/lib/ansible/plugins/action/async.py @@ -48,7 +48,7 @@ class ActionModule(ActionBase): env_string = self._compute_environment_string() module_args = self._task.args.copy() - if self._play_context.no_log or not C.DEFAULT_NO_TARGET_SYSLOG: + if self._play_context.no_log or C.DEFAULT_NO_TARGET_SYSLOG: module_args['_ansible_no_log'] = True # configure, upload, and chmod the target module From 955710267c1992c5e3b5b9eb77f4c76e289e3313 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Sat, 5 Dec 2015 15:59:51 -0500 Subject: [PATCH 0033/1113] only set become defaults at last possible moment tasks were overriding commandline with their defaults, not with the explicit setting, removed the setting of defaults from task init and pushed down to play context at last possible moment. 
fixes #13362 --- lib/ansible/playbook/become.py | 16 +++++++++------- lib/ansible/playbook/play_context.py | 3 +++ 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/lib/ansible/playbook/become.py b/lib/ansible/playbook/become.py index 643f2b555d5..1e579751d46 100644 --- a/lib/ansible/playbook/become.py +++ b/lib/ansible/playbook/become.py @@ -90,16 +90,18 @@ class Become: display.deprecated("Instead of su/su_user, use become/become_user and set become_method to 'su' (default is sudo)") - # if we are becoming someone else, but some fields are unset, - # make sure they're initialized to the default config values - if ds.get('become', False): - if ds.get('become_method', None) is None: - ds['become_method'] = C.DEFAULT_BECOME_METHOD - if ds.get('become_user', None) is None: - ds['become_user'] = C.DEFAULT_BECOME_USER return ds + def set_become_defaults(self, become, become_method, become_user): + ''' if we are becoming someone else, but some fields are unset, + make sure they're initialized to the default config values ''' + if become: + if become_method is None: + become_method = C.DEFAULT_BECOME_METHOD + if become_user is None: + become_user = C.DEFAULT_BECOME_USER + def _get_attr_become(self): ''' Override for the 'become' getattr fetcher, used from Base. diff --git a/lib/ansible/playbook/play_context.py b/lib/ansible/playbook/play_context.py index 5c020939808..9320a23ed9b 100644 --- a/lib/ansible/playbook/play_context.py +++ b/lib/ansible/playbook/play_context.py @@ -392,6 +392,9 @@ class PlayContext(Base): if new_info.no_log is None: new_info.no_log = C.DEFAULT_NO_LOG + # set become defaults if not previouslly set + task.set_become_defaults(new_info.become, new_info.become_method, new_info.become_user) + return new_info def make_become_cmd(self, cmd, executable=None): From 41773630edcf8ab138a36290c4904c6ba537390b Mon Sep 17 00:00:00 2001 From: Peter Sprygada Date: Mon, 23 Nov 2015 22:01:27 -0500 Subject: [PATCH 0034/1113] adds new device argument to nxapi command arguments The device argument allows a dict of nxapi parameters to be passed to the module to simplify passing the nxapi parameters --- lib/ansible/module_utils/nxapi.py | 75 ++++++++++++++++++++----------- 1 file changed, 50 insertions(+), 25 deletions(-) diff --git a/lib/ansible/module_utils/nxapi.py b/lib/ansible/module_utils/nxapi.py index 0589b9a50c3..35bcc442fbd 100644 --- a/lib/ansible/module_utils/nxapi.py +++ b/lib/ansible/module_utils/nxapi.py @@ -32,16 +32,16 @@ from ansible.module_utils.nxapi import * The nxapi module provides the following common argument spec: - * host (str) - [Required] The IPv4 address or FQDN of the network device + * host (str) - The IPv4 address or FQDN of the network device * port (str) - Overrides the default port to use for the HTTP/S connection. The default values are 80 for HTTP and 443 for HTTPS - * url_username (str) - [Required] The username to use to authenticate + * username (str) - The username to use to authenticate the HTTP/S connection. Aliases: username - * url_password (str) - [Required] The password to use to authenticate + * password (str) - The password to use to authenticate the HTTP/S connection. Aliases: password * use_ssl (bool) - Specifies whether or not to use an encrypted (HTTPS) @@ -51,6 +51,10 @@ The nxapi module provides the following common argument spec: device. Valid values in `cli_show`, `cli_show_ascii`, 'cli_conf` and `bash`. 
The default value is `cli_show_ascii` + * device (dict) - Used to send the entire set of connection parameters + as a dict object. This argument is mutually exclusive with the + host argument + In order to communicate with Cisco NXOS devices, the NXAPI feature must be enabled and configured on the device. @@ -58,34 +62,52 @@ must be enabled and configured on the device. NXAPI_COMMAND_TYPES = ['cli_show', 'cli_show_ascii', 'cli_conf', 'bash'] -def nxapi_argument_spec(spec=None): - """Creates an argument spec for working with NXAPI +NXAPI_COMMON_ARGS = dict( + host=dict(), + port=dict(), + username=dict(), + password=dict(), + use_ssl=dict(default=False, type='bool'), + device=dict(), + command_type=dict(default='cli_show_ascii', choices=NXAPI_COMMAND_TYPES) +) + +def nxapi_module(**kwargs): + """Append the common args to the argument_spec """ - arg_spec = url_argument_spec() - arg_spec.update(dict( - host=dict(required=True), - port=dict(), - url_username=dict(required=True, aliases=['username']), - url_password=dict(required=True, aliases=['password']), - use_ssl=dict(default=False, type='bool'), - command_type=dict(default='cli_show_ascii', choices=NXAPI_COMMAND_TYPES) - )) - if spec: - arg_spec.update(spec) - return arg_spec - -def nxapi_url(module): + spec = kwargs.get('argument_spec') or dict() + + argument_spec = url_argument_spec() + argument_spec.update(NXAPI_COMMON_ARGS) + if kwargs.get('argument_spec'): + argument_spec.update(kwargs['argument_spec']) + kwargs['argument_spec'] = argument_spec + + module = AnsibleModule(**kwargs) + + device = module.params.get('device') or dict() + for key, value in device.iteritems(): + if key in NXAPI_COMMON_ARGS: + module.params[key] = value + + params = json_dict_unicode_to_bytes(json.loads(MODULE_COMPLEX_ARGS)) + for key, value in params.iteritems(): + if key != 'device': + module.params[key] = value + + return module + +def nxapi_url(params): """Constructs a valid NXAPI url """ - if module.params['use_ssl']: + if params['use_ssl']: proto = 'https' else: proto = 'http' - host = module.params['host'] + host = params['host'] url = '{}://{}'.format(proto, host) - port = module.params['port'] - if module.params['port']: - url = '{}:{}'.format(url, module.params['port']) + if params['port']: + url = '{}:{}'.format(url, params['port']) url = '{}/ins'.format(url) return url @@ -109,7 +131,7 @@ def nxapi_body(commands, command_type, **kwargs): def nxapi_command(module, commands, command_type=None, **kwargs): """Sends the list of commands to the device over NXAPI """ - url = nxapi_url(module) + url = nxapi_url(module.params) command_type = command_type or module.params['command_type'] @@ -118,6 +140,9 @@ def nxapi_command(module, commands, command_type=None, **kwargs): headers = {'Content-Type': 'text/json'} + module.params['url_username'] = module.params['username'] + module.params['url_password'] = module.params['password'] + response, headers = fetch_url(module, url, data=data, headers=headers, method='POST') From a8e015cc22d248e965157605e30b810de280b0a4 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sun, 6 Dec 2015 22:12:48 -0800 Subject: [PATCH 0035/1113] Add representers so we can output yaml for all the types we read in from yaml --- lib/ansible/parsing/yaml/dumper.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/lib/ansible/parsing/yaml/dumper.py b/lib/ansible/parsing/yaml/dumper.py index a51289b09b9..a8a5015b8ea 100644 --- a/lib/ansible/parsing/yaml/dumper.py +++ b/lib/ansible/parsing/yaml/dumper.py @@ 
-22,7 +22,7 @@ __metaclass__ = type import yaml from ansible.compat.six import PY3 -from ansible.parsing.yaml.objects import AnsibleUnicode +from ansible.parsing.yaml.objects import AnsibleUnicode, AnsibleSequence, AnsibleMapping from ansible.vars.hostvars import HostVars class AnsibleDumper(yaml.SafeDumper): @@ -50,3 +50,13 @@ AnsibleDumper.add_representer( represent_hostvars, ) +AnsibleDumper.add_representer( + AnsibleSequence, + yaml.representer.SafeRepresenter.represent_list, +) + +AnsibleDumper.add_representer( + AnsibleMapping, + yaml.representer.SafeRepresenter.represent_dict, +) + From 4d637e5780503448840a3e4ef824b8f72aa5112a Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sun, 6 Dec 2015 22:16:31 -0800 Subject: [PATCH 0036/1113] Use self.args when we parse arguments that way the arguments can be constructed manually --- lib/ansible/cli/adhoc.py | 2 +- lib/ansible/cli/doc.py | 2 +- lib/ansible/cli/galaxy.py | 2 +- lib/ansible/cli/playbook.py | 2 +- lib/ansible/cli/pull.py | 2 +- lib/ansible/cli/vault.py | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/lib/ansible/cli/adhoc.py b/lib/ansible/cli/adhoc.py index 25f29fc2976..120b2302112 100644 --- a/lib/ansible/cli/adhoc.py +++ b/lib/ansible/cli/adhoc.py @@ -70,7 +70,7 @@ class AdHocCLI(CLI): help="module name to execute (default=%s)" % C.DEFAULT_MODULE_NAME, default=C.DEFAULT_MODULE_NAME) - self.options, self.args = self.parser.parse_args() + self.options, self.args = self.parser.parse_args(self.args[1:]) if len(self.args) != 1: raise AnsibleOptionsError("Missing target hosts") diff --git a/lib/ansible/cli/doc.py b/lib/ansible/cli/doc.py index 4eef1dd5dd6..a17164eb50e 100644 --- a/lib/ansible/cli/doc.py +++ b/lib/ansible/cli/doc.py @@ -62,7 +62,7 @@ class DocCLI(CLI): self.parser.add_option("-s", "--snippet", action="store_true", default=False, dest='show_snippet', help='Show playbook snippet for specified module(s)') - self.options, self.args = self.parser.parse_args() + self.options, self.args = self.parser.parse_args(self.args[1:]) display.verbosity = self.options.verbosity def run(self): diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py index 31c21146fc1..94c04614ace 100644 --- a/lib/ansible/cli/galaxy.py +++ b/lib/ansible/cli/galaxy.py @@ -113,7 +113,7 @@ class GalaxyCLI(CLI): help='Force overwriting an existing role') # get options, args and galaxy object - self.options, self.args =self.parser.parse_args() + self.options, self.args =self.parser.parse_args(self.args[1:]) display.verbosity = self.options.verbosity self.galaxy = Galaxy(self.options) diff --git a/lib/ansible/cli/playbook.py b/lib/ansible/cli/playbook.py index fc81f964563..a9c0ed018dc 100644 --- a/lib/ansible/cli/playbook.py +++ b/lib/ansible/cli/playbook.py @@ -72,7 +72,7 @@ class PlaybookCLI(CLI): parser.add_option('--start-at-task', dest='start_at_task', help="start the playbook at the task matching this name") - self.options, self.args = parser.parse_args() + self.options, self.args = parser.parse_args(self.args[1:]) self.parser = parser diff --git a/lib/ansible/cli/pull.py b/lib/ansible/cli/pull.py index 1543c704d57..593d601e8d4 100644 --- a/lib/ansible/cli/pull.py +++ b/lib/ansible/cli/pull.py @@ -90,7 +90,7 @@ class PullCLI(CLI): help='verify GPG signature of checked out commit, if it fails abort running the playbook.' 
' This needs the corresponding VCS module to support such an operation') - self.options, self.args = self.parser.parse_args() + self.options, self.args = self.parser.parse_args(self.args[1:]) if not self.options.dest: hostname = socket.getfqdn() diff --git a/lib/ansible/cli/vault.py b/lib/ansible/cli/vault.py index ac148d4770c..9908f17e578 100644 --- a/lib/ansible/cli/vault.py +++ b/lib/ansible/cli/vault.py @@ -69,7 +69,7 @@ class VaultCLI(CLI): elif self.action == "rekey": self.parser.set_usage("usage: %prog rekey [options] file_name") - self.options, self.args = self.parser.parse_args() + self.options, self.args = self.parser.parse_args(self.args[1:]) display.verbosity = self.options.verbosity can_output = ['encrypt', 'decrypt'] From 2c8eee956fb574ab0ef2ae362a2936f95a2d80cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Yannig=20Perr=C3=A9?= Date: Mon, 7 Dec 2015 09:25:37 +0100 Subject: [PATCH 0037/1113] Fix issue when var name is the same as content. See https://github.com/ansible/ansible/issues/13453 for more details. --- lib/ansible/plugins/action/debug.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/lib/ansible/plugins/action/debug.py b/lib/ansible/plugins/action/debug.py index a024e28b01d..1d8e28c7a4a 100644 --- a/lib/ansible/plugins/action/debug.py +++ b/lib/ansible/plugins/action/debug.py @@ -45,8 +45,12 @@ class ActionModule(ActionBase): # If var is a list or dict, use the type as key to display result[to_unicode(type(self._task.args['var']))] = results else: + # If var name is same as result, try to template it if results == self._task.args['var']: - results = "VARIABLE IS NOT DEFINED!" + try: + results = self._templar.template("{{" + results + "}}", convert_bare=True, fail_on_undefined=True) + except: + results = "VARIABLE IS NOT DEFINED!" result[self._task.args['var']] = results else: result['msg'] = 'here we are' From dcedfbe26c2aacc901fe5ef84b51103feb92990f Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Mon, 7 Dec 2015 09:54:55 -0800 Subject: [PATCH 0038/1113] corrected usage of ec2.py's profile option this was never introduced into ansible-playbook though the docs stated otherwise. We still explain how to use the env var to get the same result. --- docsite/rst/intro_dynamic_inventory.rst | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/docsite/rst/intro_dynamic_inventory.rst b/docsite/rst/intro_dynamic_inventory.rst index 1a2bd6f72c3..5f491ebc2ef 100644 --- a/docsite/rst/intro_dynamic_inventory.rst +++ b/docsite/rst/intro_dynamic_inventory.rst @@ -111,9 +111,8 @@ If you use boto profiles to manage multiple AWS accounts, you can pass ``--profi aws_access_key_id = aws_secret_access_key = -You can then run ``ec2.py --profile prod`` to get the inventory for the prod account, or run playbooks with: ``ansible-playbook -i 'ec2.py --profile prod' myplaybook.yml``. - -Alternatively, use the ``AWS_PROFILE`` variable - e.g. ``AWS_PROFILE=prod ansible-playbook -i ec2.py myplaybook.yml`` +You can then run ``ec2.py --profile prod`` to get the inventory for the prod account, this option is not supported by ``anisble-playbook`` though. +But you can use the ``AWS_PROFILE`` variable - e.g. ``AWS_PROFILE=prod ansible-playbook -i ec2.py myplaybook.yml`` Since each region requires its own API call, if you are only using a small set of regions, feel free to edit ``ec2.ini`` and list only the regions you are interested in. There are other config options in ``ec2.ini`` including cache control, and destination variables. 
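A minimal sketch of the case addressed by the debug action change a few patches above (issue #13453); the play, variable name and value here are assumptions chosen purely for illustration. Before that change, a defined variable whose value happened to equal its own name was reported as "VARIABLE IS NOT DEFINED!"; the plugin now re-templates the result before giving up::

    - hosts: localhost
      gather_facts: no
      vars:
        # the value is the same string as the variable's own name, which
        # used to trip the old "is the variable undefined?" comparison
        release: "release"
      tasks:
        - debug: var=release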
From 97626475db9fab72c27a7904d8e745638a6dde1f Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Mon, 7 Dec 2015 10:04:48 -0800 Subject: [PATCH 0039/1113] added new ec2_vpc_net_facts to 2.1 changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d246be10933..36886531bb5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ Ansible Changes By Release ## 2.1 TBD - ACTIVE DEVELOPMENT ####New Modules: +* aws: ec2_vpc_net_facts * cloudstack: cs_volume ####New Filters: From 9ae1dede0387c02b0f3772f168e94c99ce9f23a8 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Tue, 8 Dec 2015 06:36:04 -0800 Subject: [PATCH 0040/1113] adhoc does not load plugins by default reimplemented feature from 1.x which kept additional callbacks from poluting adhoc unless specifically asked for through configuration. --- lib/ansible/cli/adhoc.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lib/ansible/cli/adhoc.py b/lib/ansible/cli/adhoc.py index 120b2302112..912b07a5c72 100644 --- a/lib/ansible/cli/adhoc.py +++ b/lib/ansible/cli/adhoc.py @@ -163,6 +163,9 @@ class AdHocCLI(CLI): else: cb = 'minimal' + if not C.DEFAULT_LOAD_CALLBACK_PLUGINS: + C.DEFAULT_CALLBACK_WHITELIST = [] + if self.options.tree: C.DEFAULT_CALLBACK_WHITELIST.append('tree') C.TREE_DIR = self.options.tree From 8d500215b68aafe49c0416867af3fc701addf602 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Thu, 12 Nov 2015 16:15:42 -0500 Subject: [PATCH 0041/1113] trigger jenkins integration tests --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index cec8ccca971..2e1f15559d3 100644 --- a/README.md +++ b/README.md @@ -55,3 +55,4 @@ Ansible was created by [Michael DeHaan](https://github.com/mpdehaan) (michael.de Ansible is sponsored by [Ansible, Inc](http://ansible.com) + From 970d7cadb7f50e5f55b3aa1c12af130957f67204 Mon Sep 17 00:00:00 2001 From: David L Ballenger Date: Tue, 8 Dec 2015 07:11:02 -0800 Subject: [PATCH 0042/1113] Add ssh_host support for MacOSX El Capitan. OS X El Capitan moved the /etc/ssh_* files into /etc/ssh/. This fix adds a distribution version check for Darwin to set the keydir appropriately on El Capitan and later. 
--- lib/ansible/module_utils/facts.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/ansible/module_utils/facts.py b/lib/ansible/module_utils/facts.py index 4120a51fb5b..94a5a11f726 100644 --- a/lib/ansible/module_utils/facts.py +++ b/lib/ansible/module_utils/facts.py @@ -524,7 +524,10 @@ class Facts(object): keytypes = ('dsa', 'rsa', 'ecdsa', 'ed25519') if self.facts['system'] == 'Darwin': - keydir = '/etc' + if self.facts['distribution'] == 'MacOSX' and LooseVersion(self.facts['distribution_version']) >= LooseVersion('10.11') : + keydir = '/etc/ssh' + else: + keydir = '/etc' else: keydir = '/etc/ssh' From 9c4eae525306bf201304a15d36f531b0308cd25e Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Tue, 8 Dec 2015 11:55:35 -0500 Subject: [PATCH 0043/1113] Fix always_run support in the action plugin for template when copying Fixes #13418 --- lib/ansible/plugins/action/template.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/plugins/action/template.py b/lib/ansible/plugins/action/template.py index 109f3e80c0b..5edc4e8a2c4 100644 --- a/lib/ansible/plugins/action/template.py +++ b/lib/ansible/plugins/action/template.py @@ -157,7 +157,7 @@ class ActionModule(ActionBase): if self._play_context.diff: diff = self._get_diff_data(dest, resultant, task_vars, source_file=False) - if not self._play_context.check_mode: # do actual work thorugh copy + if not self._play_context.check_mode or self._task.always_run: # do actual work thorugh copy xfered = self._transfer_data(self._connection._shell.join_path(tmp, 'source'), resultant) # fix file permissions when the copy is done as a different user From 5cac8efd73ff39268d2bebc1f501e3ae662add9d Mon Sep 17 00:00:00 2001 From: Jeremy Audet Date: Tue, 8 Dec 2015 09:39:45 -0500 Subject: [PATCH 0044/1113] Make "make webdocs" compatible with Python 3 The `webdocs` make target fails under Python 3. It fails due to a variety of syntax errors, such as the use of `except Foo, e` and `print 'foo'`. Fix #13463 by making code compatible with both Python 2 and 3. --- docsite/build-site.py | 23 ++++++++++++----------- hacking/module_formatter.py | 4 ++-- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/docsite/build-site.py b/docsite/build-site.py index 587a189f077..24f9fc9a647 100755 --- a/docsite/build-site.py +++ b/docsite/build-site.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see . +from __future__ import print_function __docformat__ = 'restructuredtext' @@ -24,9 +25,9 @@ import traceback try: from sphinx.application import Sphinx except ImportError: - print "#################################" - print "Dependency missing: Python Sphinx" - print "#################################" + print("#################################") + print("Dependency missing: Python Sphinx") + print("#################################") sys.exit(1) import os @@ -40,7 +41,7 @@ class SphinxBuilder(object): """ Run the DocCommand. """ - print "Creating html documentation ..." + print("Creating html documentation ...") try: buildername = 'html' @@ -69,10 +70,10 @@ class SphinxBuilder(object): app.builder.build_all() - except ImportError, ie: + except ImportError: traceback.print_exc() - except Exception, ex: - print >> sys.stderr, "FAIL! exiting ... (%s)" % ex + except Exception as ex: + print("FAIL! exiting ... 
(%s)" % ex, file=sys.stderr) def build_docs(self): self.app.builder.build_all() @@ -83,9 +84,9 @@ def build_rst_docs(): if __name__ == '__main__': if '-h' in sys.argv or '--help' in sys.argv: - print "This script builds the html documentation from rst/asciidoc sources.\n" - print " Run 'make docs' to build everything." - print " Run 'make viewdocs' to build and then preview in a web browser." + print("This script builds the html documentation from rst/asciidoc sources.\n") + print(" Run 'make docs' to build everything.") + print(" Run 'make viewdocs' to build and then preview in a web browser.") sys.exit(0) build_rst_docs() @@ -93,4 +94,4 @@ if __name__ == '__main__': if "view" in sys.argv: import webbrowser if not webbrowser.open('htmlout/index.html'): - print >> sys.stderr, "Could not open on your webbrowser." + print("Could not open on your webbrowser.", file=sys.stderr) diff --git a/hacking/module_formatter.py b/hacking/module_formatter.py index f4ab5d7d9ab..4c94ca3f2c4 100755 --- a/hacking/module_formatter.py +++ b/hacking/module_formatter.py @@ -140,7 +140,7 @@ def list_modules(module_dir, depth=0): if os.path.isdir(d): res = list_modules(d, depth + 1) - for key in res.keys(): + for key in list(res.keys()): if key in categories: categories[key] = merge_hash(categories[key], res[key]) res.pop(key, None) @@ -451,7 +451,7 @@ def main(): categories = list_modules(options.module_dir) last_category = None - category_names = categories.keys() + category_names = list(categories.keys()) category_names.sort() category_list_path = os.path.join(options.output_dir, "modules_by_category.rst") From d4ccb0be59c86d8518ba4becaed5c7442d8758fc Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Tue, 8 Dec 2015 09:20:49 -0800 Subject: [PATCH 0045/1113] have always_run override check mode for a task Fixes #13418 --- lib/ansible/playbook/play_context.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/ansible/playbook/play_context.py b/lib/ansible/playbook/play_context.py index 9320a23ed9b..81223500adf 100644 --- a/lib/ansible/playbook/play_context.py +++ b/lib/ansible/playbook/play_context.py @@ -395,6 +395,10 @@ class PlayContext(Base): # set become defaults if not previouslly set task.set_become_defaults(new_info.become, new_info.become_method, new_info.become_user) + # have always_run override check mode + if task.always_run: + new_info.check_mode = False + return new_info def make_become_cmd(self, cmd, executable=None): From 7ffd578a9d38b80e71ef6df2219f7e887e2909b7 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Tue, 8 Dec 2015 09:24:20 -0800 Subject: [PATCH 0046/1113] Revert "Fix always_run support in the action plugin for template when copying" This reverts commit 9c4eae525306bf201304a15d36f531b0308cd25e. 
--- lib/ansible/plugins/action/template.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/plugins/action/template.py b/lib/ansible/plugins/action/template.py index 5edc4e8a2c4..109f3e80c0b 100644 --- a/lib/ansible/plugins/action/template.py +++ b/lib/ansible/plugins/action/template.py @@ -157,7 +157,7 @@ class ActionModule(ActionBase): if self._play_context.diff: diff = self._get_diff_data(dest, resultant, task_vars, source_file=False) - if not self._play_context.check_mode or self._task.always_run: # do actual work thorugh copy + if not self._play_context.check_mode: # do actual work thorugh copy xfered = self._transfer_data(self._connection._shell.join_path(tmp, 'source'), resultant) # fix file permissions when the copy is done as a different user From 05c8bb79f8158ca8a93d50bc798dd1bed02aaa89 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Tue, 8 Dec 2015 12:24:42 -0500 Subject: [PATCH 0047/1113] playbook that Ansible jenkins runs moved into core The playbook is already running in jenkins and works. This moves the assets into core for ease of maintenance going forward. --- .../ansible.cfg | 2 + .../ec2.yml | 41 ++++++++++ .../inventory | 1 + .../inventory.dynamic | 3 + .../main.yml | 62 ++++++++++++++ .../roles/ansible_deps/.gitignore | 1 + .../roles/ansible_deps/.travis.yml | 37 +++++++++ .../roles/ansible_deps/README.md | 8 ++ .../roles/ansible_deps/defaults/main.yml | 2 + .../roles/ansible_deps/handlers/main.yml | 2 + .../ansible_deps/meta/.galaxy_install_info | 1 + .../roles/ansible_deps/meta/main.yml | 23 ++++++ .../roles/ansible_deps/tasks/main.yml | 81 +++++++++++++++++++ .../roles/ansible_deps/test/inventory | 1 + .../roles/ansible_deps/test/main.yml | 29 +++++++ .../roles/ansible_deps/test/requirements.yml | 2 + .../roles/ansible_deps/vars/main.yml | 2 + .../roles/run_integration/tasks/main.yml | 20 +++++ 18 files changed, 318 insertions(+) create mode 100644 test/utils/ansible-playbook_integration_runner/ansible.cfg create mode 100644 test/utils/ansible-playbook_integration_runner/ec2.yml create mode 100644 test/utils/ansible-playbook_integration_runner/inventory create mode 100644 test/utils/ansible-playbook_integration_runner/inventory.dynamic create mode 100644 test/utils/ansible-playbook_integration_runner/main.yml create mode 100644 test/utils/ansible-playbook_integration_runner/roles/ansible_deps/.gitignore create mode 100644 test/utils/ansible-playbook_integration_runner/roles/ansible_deps/.travis.yml create mode 100644 test/utils/ansible-playbook_integration_runner/roles/ansible_deps/README.md create mode 100644 test/utils/ansible-playbook_integration_runner/roles/ansible_deps/defaults/main.yml create mode 100644 test/utils/ansible-playbook_integration_runner/roles/ansible_deps/handlers/main.yml create mode 100644 test/utils/ansible-playbook_integration_runner/roles/ansible_deps/meta/.galaxy_install_info create mode 100644 test/utils/ansible-playbook_integration_runner/roles/ansible_deps/meta/main.yml create mode 100644 test/utils/ansible-playbook_integration_runner/roles/ansible_deps/tasks/main.yml create mode 100644 test/utils/ansible-playbook_integration_runner/roles/ansible_deps/test/inventory create mode 100644 test/utils/ansible-playbook_integration_runner/roles/ansible_deps/test/main.yml create mode 100644 test/utils/ansible-playbook_integration_runner/roles/ansible_deps/test/requirements.yml create mode 100644 test/utils/ansible-playbook_integration_runner/roles/ansible_deps/vars/main.yml create mode 100644 
test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml diff --git a/test/utils/ansible-playbook_integration_runner/ansible.cfg b/test/utils/ansible-playbook_integration_runner/ansible.cfg new file mode 100644 index 00000000000..14c80651521 --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/ansible.cfg @@ -0,0 +1,2 @@ +[defaults] +host_key_checking = False diff --git a/test/utils/ansible-playbook_integration_runner/ec2.yml b/test/utils/ansible-playbook_integration_runner/ec2.yml new file mode 100644 index 00000000000..59e15f0da1a --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/ec2.yml @@ -0,0 +1,41 @@ +- name: Launch Instance + ec2: + group_id: 'sg-07bb906d' # jenkins-slave_new + count: 1 + instance_type: 'm3.medium' + image: '{{ item.image }}' + wait: true + region: 'us-east-1' + keypair: '{{ keypair }}' + aws_access_key: "{{ aws_access_key|default(lookup('env', 'AWS_ACCESS_KEY')) }}" + aws_secret_key: "{{ aws_secret_key|default(lookup('env', 'AWS_SECRET_KEY')) }}" + instance_tags: + jenkins: jenkins_ansible_pr_test + register: ec2 + with_items: slaves +# We could do an async here, that would speed things up + + +- name: Wait for SSH + wait_for: + host: "{{ item['instances'][0]['public_ip'] }}" + port: 22 + delay: 10 + timeout: 320 + state: started + with_items: ec2.results + +- name: Wait a little longer for centos + pause: seconds=20 + +- name: Add hosts group temporary inventory group with pem path + add_host: + name: "{{ item.1.platform }} {{ ec2.results[item.0]['instances'][0]['public_ip'] }}" + groups: dynamic_hosts + ansible_ssh_host: "{{ ec2.results[item.0]['instances'][0]['public_ip'] }}" + ansible_ssh_private_key_file: '{{ pem_path }}' + ansible_ssh_user: "{{ item.1.ssh_user }}" + ec2_vars: "{{ ec2.results[item.0]['instances'][0] }}" + ec2_instance_ids: "{{ ec2.results[item.0]['instance_ids'] }}" + with_indexed_items: slaves + diff --git a/test/utils/ansible-playbook_integration_runner/inventory b/test/utils/ansible-playbook_integration_runner/inventory new file mode 100644 index 00000000000..42de3a1b5d7 --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/inventory @@ -0,0 +1 @@ +localhost ansible_connection=local ansible_python_interpreter="/usr/bin/env python" diff --git a/test/utils/ansible-playbook_integration_runner/inventory.dynamic b/test/utils/ansible-playbook_integration_runner/inventory.dynamic new file mode 100644 index 00000000000..1aa03b4ed8d --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/inventory.dynamic @@ -0,0 +1,3 @@ +localhost ansible_connection=local ansible_python_interpreter="/usr/bin/env python" +[dynamic_hosts] +54.157.26.110 ansible_ssh_user=root ansible_ssh_private_key_file=/Users/meyers/Dropbox/.ssh/Ansible_chris_meyers.pem diff --git a/test/utils/ansible-playbook_integration_runner/main.yml b/test/utils/ansible-playbook_integration_runner/main.yml new file mode 100644 index 00000000000..8661a6dba9e --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/main.yml @@ -0,0 +1,62 @@ +- hosts: all + connection: local + vars: + slaves: + - distribution: "Ubuntu" + version: "12.04" + image: "ami-2ccc7a44" + ssh_user: "ubuntu" + platform: "ubuntu-12.04-x86_64" + - distribution: "Ubuntu" + version: "14.04" + image: "ami-9a562df2" + ssh_user: "ubuntu" + platform: "ubuntu-14.04-x86_64" + - distribution: "CentOS" + version: "6.5" + image: "ami-8997afe0" + ssh_user: "root" + platform: "centos-6.5-x86_64" + - distribution: "CentOS" + version: "7" + image: 
"ami-96a818fe" + ssh_user: "centos" + platform: "centos-7-x86_64" + + tasks: + - debug: var=ansible_version + - include: ec2.yml + when: groups['dynamic_hosts'] is not defined + +- hosts: dynamic_hosts + sudo: true + vars: + credentials_file: '' + test_flags: "" + make_target: "non_destructive" + #pre_tasks: + roles: + - { role: ansible_deps, tags: ansible_deps } + - { role: run_integration, + tags: run_integration, + run_integration_test_flags: "{{ test_flags }}", + run_integration_credentials_file: "{{ credentials_file }}", + run_integration_make_target: "{{ make_target }}", } + tasks: + + - name: Kill ec2 instances + sudo: false + local_action: + module: ec2 + state: absent + region: 'us-east-1' + instance_ids: "{{ hostvars[item]['ec2_instance_ids'] }}" + when: hostvars[item]['ec2_instance_ids'] is defined and item == inventory_hostname + with_items: groups['dynamic_hosts'] + + - set_fact: + ansible_connection: local + + - name: Fail + shell: 'echo "{{ inventory_hostname }}, Failed" && exit 1' + when: "test_results.rc != 0" diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/.gitignore b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/.gitignore new file mode 100644 index 00000000000..1377554ebea --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/.gitignore @@ -0,0 +1 @@ +*.swp diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/.travis.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/.travis.yml new file mode 100644 index 00000000000..2264f0b20a7 --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/.travis.yml @@ -0,0 +1,37 @@ +sudo: required +dist: trusty +language: python +python: + - "2.7" +services: + - docker +env: + global: + - PATH="/usr/bin:$PATH" + +before_install: + # Ansible doesn't play well with virtualenv + - deactivate + - sudo apt-get update -qq + - sudo apt-get install docker-engine + +install: + - sudo pip install docker-py + # software-properties-common for ubuntu 14.04 + # python-software-properties for ubuntu 12.04 + - sudo apt-get install -y sshpass software-properties-common python-software-properties + - sudo apt-add-repository -y ppa:ansible/ansible + - sudo apt-get update -qq + - sudo apt-get install -y ansible + - sudo rm /usr/bin/python && sudo ln -s /usr/bin/python2.7 /usr/bin/python + - ansible-galaxy install -r test/requirements.yml -p test/roles/ + +script: + # Ensure any invocation of ansible-playbook (i.e. sudo) results in host_key_checking disabled + - sudo ansible all -i "127.0.0.1," -m lineinfile -a "regexp=^#host_key_checking dest=/etc/ansible/ansible.cfg line='host_key_checking = False'" -c local + - ansible-playbook -i test/inventory test/main.yml --syntax-check + - sudo ansible-playbook -i test/inventory test/main.yml + +notifications: + # notify ansible galaxy of results + webhooks: http://goo.gl/nSuq9h diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/README.md b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/README.md new file mode 100644 index 00000000000..f0fc755863c --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/README.md @@ -0,0 +1,8 @@ +[![Build Status](https://travis-ci.org/chrismeyersfsu/role-ansible_deps.svg)](https://travis-ci.org/chrismeyersfsu/role-ansible_deps) + +ansible_deps +========= + +Install needed packages to run ansible integration tests. 
+ +This role is periodically synced from ansible core repo to chrismeyersfsu/role-ansible_deps so that automated tests may run and so this role is accessible from galaxy. diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/defaults/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/defaults/main.yml new file mode 100644 index 00000000000..c7837fc56b1 --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/defaults/main.yml @@ -0,0 +1,2 @@ +--- +# defaults file for . diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/handlers/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/handlers/main.yml new file mode 100644 index 00000000000..050cdd12342 --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/handlers/main.yml @@ -0,0 +1,2 @@ +--- +# handlers file for . diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/meta/.galaxy_install_info b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/meta/.galaxy_install_info new file mode 100644 index 00000000000..ffc298fff6f --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/meta/.galaxy_install_info @@ -0,0 +1 @@ +{install_date: 'Tue Dec 8 15:06:28 2015', version: master} diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/meta/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/meta/main.yml new file mode 100644 index 00000000000..07c15d619ee --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/meta/main.yml @@ -0,0 +1,23 @@ +--- +galaxy_info: + author: Chris Meyers + description: install ansible integration test dependencies + company: Ansible + license: license (GPLv2, CC-BY, etc) + min_ansible_version: 1.2 + platforms: + - name: EL + versions: + - 6 + - 7 + - name: Ubuntu + versions: + - precise + - trusty + galaxy_tags: + - testing + - integration + - ansible + - dependencies +dependencies: [] + diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/tasks/main.yml new file mode 100644 index 00000000000..f71128921d9 --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/tasks/main.yml @@ -0,0 +1,81 @@ +--- + +- name: Install sudo + yum: name=sudo state=installed + ignore_errors: true + when: ansible_os_family == 'RedHat' + +- name: Install sudo + apt: name=sudo state=installed + ignore_errors: true + when: ansible_os_family == 'Debian' + +- name: Install RH epel + yum: name="epel-release" state=installed + sudo: true + when: ansible_os_family == 'RedHat' + +- name: Install RH ansible dependencies + yum: name="{{ item }}" state=installed + sudo: true + with_items: + - python-pip + - python-httplib2 + - rsync + - subversion + - mercurial + - git + - rubygems + - unzip + - openssl + - make + - gcc + - python-devel + - libselinux-python + when: ansible_os_family == 'RedHat' + +- apt: update_cache=yes + when: ansible_os_family == 'Debian' + +- name: Install Debian ansible dependencies + apt: name="{{ item }}" state=installed update_cache=yes + sudo: true + with_items: + - python-pip + - python-httplib2 + - rsync + - subversion + - mercurial + - git + - unzip + - python-dev + when: ansible_os_family == 'Debian' + +- name: Install ubuntu 12.04 ansible dependencies + apt: name="{{ item }}" 
state=installed update_cache=yes + sudo: true + with_items: + - rubygems + when: ansible_distribution == 'Ubuntu' and ansible_distribution_version == "12.04" + +- name: Install ubuntu 14.04 ansible dependencies + apt: name="{{ item }}" state=installed update_cache=yes + sudo: true + with_items: + - rubygems-integration + when: ansible_distribution == 'Ubuntu' and ansible_distribution_version == "14.04" + +- name: Install ansible pip deps + sudo: true + pip: name="{{ item }}" + with_items: + - PyYAML + - Jinja2 + - paramiko + +- name: Remove tty sudo requirement + sudo: true + lineinfile: "dest=/etc/sudoers regexp='^Defaults[ , ]*requiretty' line='#Defaults requiretty'" + when: ansible_os_family == 'RedHat' + + diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/test/inventory b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/test/inventory new file mode 100644 index 00000000000..2302edae31b --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/test/inventory @@ -0,0 +1 @@ +localhost ansible_connection=local diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/test/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/test/main.yml new file mode 100644 index 00000000000..95617dbfac3 --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/test/main.yml @@ -0,0 +1,29 @@ +--- +- name: Bring up docker containers + hosts: localhost + gather_facts: false + vars: + inventory: + - name: ansible_deps_host_1 + image: "chrismeyers/centos6" + - name: ansible_deps_host_2 + image: "chrismeyers/ubuntu12.04" + - name: ansible_deps_host_3 + image: "ubuntu-upstart:14.04" + roles: + - { role: provision_docker, provision_docker_company: 'ansible', provision_docker_inventory: "{{ inventory }}" } + +- name: Run ansible_deps Tests + hosts: docker_containers + vars: + git_dir: "/tmp/ansible" + roles: + - { role: ansible_deps } + tasks: + - name: Clone ansible + git: + repo: "https://github.com/ansible/ansible.git" + dest: "{{ git_dir }}" + - name: Invoke ansible in hacking mode + shell: "cd {{ git_dir }} && . hacking/env-setup && ansible --version && ansible-playbook --version" + diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/test/requirements.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/test/requirements.yml new file mode 100644 index 00000000000..fa10641a72e --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/test/requirements.yml @@ -0,0 +1,2 @@ +- src: chrismeyersfsu.provision_docker + name: provision_docker diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/vars/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/vars/main.yml new file mode 100644 index 00000000000..a38c5fb0425 --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/vars/main.yml @@ -0,0 +1,2 @@ +--- +# vars file for . 
diff --git a/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml new file mode 100644 index 00000000000..2114567d152 --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml @@ -0,0 +1,20 @@ +--- +- name: Sync ansible repo to ec2 instance + synchronize: + src: "{{ sync_dir }}/" + dest: "~/ansible" + +- name: Get ansible source dir + sudo: false + shell: "cd ~ && pwd" + register: results + +- shell: ". hacking/env-setup && cd test/integration && make {{ run_integration_make_target }}" + sudo: true + environment: + TEST_FLAGS: "{{ run_integration_test_flags|default(lookup('env', 'TEST_FLAGS')) }}" + CREDENTIALS_FILE: "{{ run_integration_credentials_file|default(lookup('env', 'CREDENTIALS_FILE')) }}" + args: + chdir: "{{ results.stdout }}/ansible" + register: test_results + ignore_errors: true From 822624d061c55c5386e260b67d923627df3394fd Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Tue, 8 Dec 2015 14:05:57 -0500 Subject: [PATCH 0048/1113] rename role ansible_deps to ansible_test_deps --- .../roles/{ansible_deps => ansible_test_deps}/.gitignore | 0 .../roles/{ansible_deps => ansible_test_deps}/.travis.yml | 0 .../roles/{ansible_deps => ansible_test_deps}/README.md | 0 .../roles/{ansible_deps => ansible_test_deps}/defaults/main.yml | 0 .../roles/{ansible_deps => ansible_test_deps}/handlers/main.yml | 0 .../{ansible_deps => ansible_test_deps}/meta/.galaxy_install_info | 0 .../roles/{ansible_deps => ansible_test_deps}/meta/main.yml | 0 .../roles/{ansible_deps => ansible_test_deps}/tasks/main.yml | 0 .../roles/{ansible_deps => ansible_test_deps}/test/inventory | 0 .../roles/{ansible_deps => ansible_test_deps}/test/main.yml | 0 .../{ansible_deps => ansible_test_deps}/test/requirements.yml | 0 .../roles/{ansible_deps => ansible_test_deps}/vars/main.yml | 0 12 files changed, 0 insertions(+), 0 deletions(-) rename test/utils/ansible-playbook_integration_runner/roles/{ansible_deps => ansible_test_deps}/.gitignore (100%) rename test/utils/ansible-playbook_integration_runner/roles/{ansible_deps => ansible_test_deps}/.travis.yml (100%) rename test/utils/ansible-playbook_integration_runner/roles/{ansible_deps => ansible_test_deps}/README.md (100%) rename test/utils/ansible-playbook_integration_runner/roles/{ansible_deps => ansible_test_deps}/defaults/main.yml (100%) rename test/utils/ansible-playbook_integration_runner/roles/{ansible_deps => ansible_test_deps}/handlers/main.yml (100%) rename test/utils/ansible-playbook_integration_runner/roles/{ansible_deps => ansible_test_deps}/meta/.galaxy_install_info (100%) rename test/utils/ansible-playbook_integration_runner/roles/{ansible_deps => ansible_test_deps}/meta/main.yml (100%) rename test/utils/ansible-playbook_integration_runner/roles/{ansible_deps => ansible_test_deps}/tasks/main.yml (100%) rename test/utils/ansible-playbook_integration_runner/roles/{ansible_deps => ansible_test_deps}/test/inventory (100%) rename test/utils/ansible-playbook_integration_runner/roles/{ansible_deps => ansible_test_deps}/test/main.yml (100%) rename test/utils/ansible-playbook_integration_runner/roles/{ansible_deps => ansible_test_deps}/test/requirements.yml (100%) rename test/utils/ansible-playbook_integration_runner/roles/{ansible_deps => ansible_test_deps}/vars/main.yml (100%) diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/.gitignore 
b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/.gitignore similarity index 100% rename from test/utils/ansible-playbook_integration_runner/roles/ansible_deps/.gitignore rename to test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/.gitignore diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/.travis.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/.travis.yml similarity index 100% rename from test/utils/ansible-playbook_integration_runner/roles/ansible_deps/.travis.yml rename to test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/.travis.yml diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/README.md b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/README.md similarity index 100% rename from test/utils/ansible-playbook_integration_runner/roles/ansible_deps/README.md rename to test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/README.md diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/defaults/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/defaults/main.yml similarity index 100% rename from test/utils/ansible-playbook_integration_runner/roles/ansible_deps/defaults/main.yml rename to test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/defaults/main.yml diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/handlers/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/handlers/main.yml similarity index 100% rename from test/utils/ansible-playbook_integration_runner/roles/ansible_deps/handlers/main.yml rename to test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/handlers/main.yml diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/meta/.galaxy_install_info b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/meta/.galaxy_install_info similarity index 100% rename from test/utils/ansible-playbook_integration_runner/roles/ansible_deps/meta/.galaxy_install_info rename to test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/meta/.galaxy_install_info diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/meta/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/meta/main.yml similarity index 100% rename from test/utils/ansible-playbook_integration_runner/roles/ansible_deps/meta/main.yml rename to test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/meta/main.yml diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml similarity index 100% rename from test/utils/ansible-playbook_integration_runner/roles/ansible_deps/tasks/main.yml rename to test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/test/inventory b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/test/inventory similarity index 100% rename from test/utils/ansible-playbook_integration_runner/roles/ansible_deps/test/inventory rename to test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/test/inventory diff --git 
a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/test/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/test/main.yml similarity index 100% rename from test/utils/ansible-playbook_integration_runner/roles/ansible_deps/test/main.yml rename to test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/test/main.yml diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/test/requirements.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/test/requirements.yml similarity index 100% rename from test/utils/ansible-playbook_integration_runner/roles/ansible_deps/test/requirements.yml rename to test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/test/requirements.yml diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_deps/vars/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/vars/main.yml similarity index 100% rename from test/utils/ansible-playbook_integration_runner/roles/ansible_deps/vars/main.yml rename to test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/vars/main.yml From de690445bca1f47e773e43b6cd6f1ed0b2ec278b Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Tue, 8 Dec 2015 14:00:17 -0500 Subject: [PATCH 0049/1113] Make fact delegating configurable, defaulting to 1.x behavior --- lib/ansible/playbook/block.py | 1 + lib/ansible/playbook/role/__init__.py | 1 + lib/ansible/playbook/role/include.py | 3 ++- lib/ansible/playbook/task.py | 1 + lib/ansible/plugins/strategy/__init__.py | 2 +- 5 files changed, 6 insertions(+), 2 deletions(-) diff --git a/lib/ansible/playbook/block.py b/lib/ansible/playbook/block.py index 0de5e635e7e..e842883bc82 100644 --- a/lib/ansible/playbook/block.py +++ b/lib/ansible/playbook/block.py @@ -34,6 +34,7 @@ class Block(Base, Become, Conditional, Taggable): _rescue = FieldAttribute(isa='list', default=[]) _always = FieldAttribute(isa='list', default=[]) _delegate_to = FieldAttribute(isa='list') + _delegate_facts = FieldAttribute(isa='bool', defalt=False) # for future consideration? this would be functionally # similar to the 'else' clause for exceptions diff --git a/lib/ansible/playbook/role/__init__.py b/lib/ansible/playbook/role/__init__.py index 3cb914689fe..bd7760d221c 100644 --- a/lib/ansible/playbook/role/__init__.py +++ b/lib/ansible/playbook/role/__init__.py @@ -61,6 +61,7 @@ def hash_params(params): class Role(Base, Become, Conditional, Taggable): _delegate_to = FieldAttribute(isa='string') + _delegate_facts = FieldAttribute(isa='bool', defalt=False) def __init__(self, play=None): self._role_name = None diff --git a/lib/ansible/playbook/role/include.py b/lib/ansible/playbook/role/include.py index 67949e2e124..6e89eb33343 100644 --- a/lib/ansible/playbook/role/include.py +++ b/lib/ansible/playbook/role/include.py @@ -40,7 +40,8 @@ class RoleInclude(RoleDefinition): is included for execution in a play. 
""" - _delegate_to = FieldAttribute(isa='string') + _delegate_to = FieldAttribute(isa='string') + _delegate_facts = FieldAttribute(isa='bool', defalt=False) def __init__(self, play=None, role_basedir=None, variable_manager=None, loader=None): super(RoleInclude, self).__init__(play=play, role_basedir=role_basedir, variable_manager=variable_manager, loader=loader) diff --git a/lib/ansible/playbook/task.py b/lib/ansible/playbook/task.py index 21dbc87becf..6c7730cb2a5 100644 --- a/lib/ansible/playbook/task.py +++ b/lib/ansible/playbook/task.py @@ -72,6 +72,7 @@ class Task(Base, Conditional, Taggable, Become): _changed_when = FieldAttribute(isa='string') _delay = FieldAttribute(isa='int', default=5) _delegate_to = FieldAttribute(isa='string') + _delegate_facts = FieldAttribute(isa='bool', defalt=False) _failed_when = FieldAttribute(isa='string') _first_available_file = FieldAttribute(isa='list') _loop = FieldAttribute(isa='string', private=True) diff --git a/lib/ansible/plugins/strategy/__init__.py b/lib/ansible/plugins/strategy/__init__.py index 0d0cc4a9dce..732a9293d28 100644 --- a/lib/ansible/plugins/strategy/__init__.py +++ b/lib/ansible/plugins/strategy/__init__.py @@ -289,7 +289,7 @@ class StrategyBase: # find the host we're actually refering too here, which may # be a host that is not really in inventory at all - if task.delegate_to is not None: + if task.delegate_to is not None and task.delegate_facts: task_vars = self._variable_manager.get_vars(loader=self._loader, play=iterator._play, host=host, task=task) self.add_tqm_variables(task_vars, play=iterator._play) if item is not None: From 398f6bbb89ebdcd3ef0efdbc26d54801a0eb2e55 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Tue, 8 Dec 2015 14:34:37 -0500 Subject: [PATCH 0050/1113] Fix typo from 5ae850c --- lib/ansible/playbook/block.py | 2 +- lib/ansible/playbook/role/__init__.py | 2 +- lib/ansible/playbook/role/include.py | 2 +- lib/ansible/playbook/task.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/ansible/playbook/block.py b/lib/ansible/playbook/block.py index e842883bc82..f2d9c82833a 100644 --- a/lib/ansible/playbook/block.py +++ b/lib/ansible/playbook/block.py @@ -34,7 +34,7 @@ class Block(Base, Become, Conditional, Taggable): _rescue = FieldAttribute(isa='list', default=[]) _always = FieldAttribute(isa='list', default=[]) _delegate_to = FieldAttribute(isa='list') - _delegate_facts = FieldAttribute(isa='bool', defalt=False) + _delegate_facts = FieldAttribute(isa='bool', default=False) # for future consideration? 
this would be functionally # similar to the 'else' clause for exceptions diff --git a/lib/ansible/playbook/role/__init__.py b/lib/ansible/playbook/role/__init__.py index bd7760d221c..1c6b344a4fc 100644 --- a/lib/ansible/playbook/role/__init__.py +++ b/lib/ansible/playbook/role/__init__.py @@ -61,7 +61,7 @@ def hash_params(params): class Role(Base, Become, Conditional, Taggable): _delegate_to = FieldAttribute(isa='string') - _delegate_facts = FieldAttribute(isa='bool', defalt=False) + _delegate_facts = FieldAttribute(isa='bool', default=False) def __init__(self, play=None): self._role_name = None diff --git a/lib/ansible/playbook/role/include.py b/lib/ansible/playbook/role/include.py index 6e89eb33343..43e2d9e4fc1 100644 --- a/lib/ansible/playbook/role/include.py +++ b/lib/ansible/playbook/role/include.py @@ -41,7 +41,7 @@ class RoleInclude(RoleDefinition): """ _delegate_to = FieldAttribute(isa='string') - _delegate_facts = FieldAttribute(isa='bool', defalt=False) + _delegate_facts = FieldAttribute(isa='bool', default=False) def __init__(self, play=None, role_basedir=None, variable_manager=None, loader=None): super(RoleInclude, self).__init__(play=play, role_basedir=role_basedir, variable_manager=variable_manager, loader=loader) diff --git a/lib/ansible/playbook/task.py b/lib/ansible/playbook/task.py index 6c7730cb2a5..17f1952e39c 100644 --- a/lib/ansible/playbook/task.py +++ b/lib/ansible/playbook/task.py @@ -72,7 +72,7 @@ class Task(Base, Conditional, Taggable, Become): _changed_when = FieldAttribute(isa='string') _delay = FieldAttribute(isa='int', default=5) _delegate_to = FieldAttribute(isa='string') - _delegate_facts = FieldAttribute(isa='bool', defalt=False) + _delegate_facts = FieldAttribute(isa='bool', default=False) _failed_when = FieldAttribute(isa='string') _first_available_file = FieldAttribute(isa='list') _loop = FieldAttribute(isa='string', private=True) From ec5827c22a1f238591c4c21413bf690ceb83aa1f Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Tue, 8 Dec 2015 11:52:59 -0800 Subject: [PATCH 0051/1113] updated with delegate_facts directive --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 36886531bb5..3d31ef4ebb2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -33,7 +33,7 @@ Ansible Changes By Release by setting the `ANSIBLE_NULL_REPRESENTATION` environment variable. * Added `meta: refresh_inventory` to force rereading the inventory in a play. This re-executes inventory scripts, but does not force them to ignore any cache they might use. -* Now when you delegate an action that returns ansible_facts, these facts will be applied to the delegated host, unlike before when they were applied to the current host. +* New delegate_facts directive, a boolean that allows you to apply facts to the delegated host (true/yes) instead of the inventory_hostname (no/false) which is the default and previous behaviour. * local connections now work with 'su' as a privilege escalation method * New ssh configuration variables(`ansible_ssh_common_args`, `ansible_ssh_extra_args`) can be used to configure a per-group or per-host ssh ProxyCommand or set any other ssh options. 
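As a concrete illustration of the new ssh configuration variables mentioned in the changelog entry above, here is a minimal sketch of a per-group ProxyCommand; the group name, user and jump host are assumptions, not part of this series::

    # group_vars/gatewayed.yml
    ansible_ssh_common_args: '-o ProxyCommand="ssh -W %h:%p -q jumpuser@jumphost.example.com"'

Because the variable can be set per group or per host, only the hosts placed behind the bastion pick up the extra ssh options.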
From 795fac917ea5970fd9583a41dad7a6d33a626b75 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Tue, 8 Dec 2015 11:59:04 -0800 Subject: [PATCH 0052/1113] fixed typo in tree callback, added default dir this would allow it to work with playbooks also --- lib/ansible/plugins/callback/tree.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/ansible/plugins/callback/tree.py b/lib/ansible/plugins/callback/tree.py index 8b1118864ec..b6ecd6de878 100644 --- a/lib/ansible/plugins/callback/tree.py +++ b/lib/ansible/plugins/callback/tree.py @@ -41,7 +41,8 @@ class CallbackModule(CallbackBase): self.tree = TREE_DIR if not self.tree: - self._display.warnings("Disabling tree callback, invalid directory provided to tree option: %s" % self.tree) + self.tree = os.path.expanduser("~/.ansible/tree") + self._display.warning("Defaulting to ~/.ansible/tree, invalid directory provided to tree option: %s" % self.tree) def write_tree_file(self, hostname, buf): ''' write something into treedir/hostname ''' @@ -53,7 +54,7 @@ class CallbackModule(CallbackBase): with open(path, 'wb+') as fd: fd.write(buf) except (OSError, IOError) as e: - self._display.warnings("Unable to write to %s's file: %s" % (hostname, str(e))) + self._display.warning("Unable to write to %s's file: %s" % (hostname, str(e))) def result_to_tree(self, result): if self.tree: From 1799de8528926355f51f79f705a6927a05ba018a Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Tue, 8 Dec 2015 15:02:25 -0500 Subject: [PATCH 0053/1113] Preserve original token when appending to _raw_params in parse_kv Fixes #13311 --- lib/ansible/parsing/splitter.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/ansible/parsing/splitter.py b/lib/ansible/parsing/splitter.py index c506603acb5..f24d8ecf9de 100644 --- a/lib/ansible/parsing/splitter.py +++ b/lib/ansible/parsing/splitter.py @@ -65,8 +65,8 @@ def parse_kv(args, check_raw=False): raise raw_params = [] - for x in vargs: - x = _decode_escapes(x) + for orig_x in vargs: + x = _decode_escapes(orig_x) if "=" in x: pos = 0 try: @@ -90,7 +90,7 @@ def parse_kv(args, check_raw=False): else: options[k.strip()] = unquote(v.strip()) else: - raw_params.append(x) + raw_params.append(orig_x) # recombine the free-form params, if any were found, and assign # them to a special option for use later by the shell/command module From 0e55398e16de1ca99dbe2115a4809c57cdbb5150 Mon Sep 17 00:00:00 2001 From: Jeremy Audet Date: Tue, 8 Dec 2015 09:39:45 -0500 Subject: [PATCH 0054/1113] Make "make webdocs" compatible with Python 3 The `webdocs` make target fails under Python 3. It fails due to a variety of syntax errors, such as the use of `except Foo, e` and `print 'foo'`. Fix #13463 by making code compatible with both Python 2 and 3. --- docsite/build-site.py | 23 ++++++++++++----------- hacking/module_formatter.py | 4 ++-- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/docsite/build-site.py b/docsite/build-site.py index 587a189f077..24f9fc9a647 100755 --- a/docsite/build-site.py +++ b/docsite/build-site.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see . 
+from __future__ import print_function __docformat__ = 'restructuredtext' @@ -24,9 +25,9 @@ import traceback try: from sphinx.application import Sphinx except ImportError: - print "#################################" - print "Dependency missing: Python Sphinx" - print "#################################" + print("#################################") + print("Dependency missing: Python Sphinx") + print("#################################") sys.exit(1) import os @@ -40,7 +41,7 @@ class SphinxBuilder(object): """ Run the DocCommand. """ - print "Creating html documentation ..." + print("Creating html documentation ...") try: buildername = 'html' @@ -69,10 +70,10 @@ class SphinxBuilder(object): app.builder.build_all() - except ImportError, ie: + except ImportError: traceback.print_exc() - except Exception, ex: - print >> sys.stderr, "FAIL! exiting ... (%s)" % ex + except Exception as ex: + print("FAIL! exiting ... (%s)" % ex, file=sys.stderr) def build_docs(self): self.app.builder.build_all() @@ -83,9 +84,9 @@ def build_rst_docs(): if __name__ == '__main__': if '-h' in sys.argv or '--help' in sys.argv: - print "This script builds the html documentation from rst/asciidoc sources.\n" - print " Run 'make docs' to build everything." - print " Run 'make viewdocs' to build and then preview in a web browser." + print("This script builds the html documentation from rst/asciidoc sources.\n") + print(" Run 'make docs' to build everything.") + print(" Run 'make viewdocs' to build and then preview in a web browser.") sys.exit(0) build_rst_docs() @@ -93,4 +94,4 @@ if __name__ == '__main__': if "view" in sys.argv: import webbrowser if not webbrowser.open('htmlout/index.html'): - print >> sys.stderr, "Could not open on your webbrowser." + print("Could not open on your webbrowser.", file=sys.stderr) diff --git a/hacking/module_formatter.py b/hacking/module_formatter.py index f4ab5d7d9ab..4c94ca3f2c4 100755 --- a/hacking/module_formatter.py +++ b/hacking/module_formatter.py @@ -140,7 +140,7 @@ def list_modules(module_dir, depth=0): if os.path.isdir(d): res = list_modules(d, depth + 1) - for key in res.keys(): + for key in list(res.keys()): if key in categories: categories[key] = merge_hash(categories[key], res[key]) res.pop(key, None) @@ -451,7 +451,7 @@ def main(): categories = list_modules(options.module_dir) last_category = None - category_names = categories.keys() + category_names = list(categories.keys()) category_names.sort() category_list_path = os.path.join(options.output_dir, "modules_by_category.rst") From 021605a19578309cccc5cdec8c47c512b819d7e0 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Thu, 12 Nov 2015 18:42:39 -0800 Subject: [PATCH 0055/1113] keep string type filters as strings now we don't try to convert types if using a filter that outputs a specifically formated string made list of filters configurable --- lib/ansible/constants.py | 1 + lib/ansible/template/__init__.py | 9 +++++---- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py index 6faae928dbe..0f809db7297 100644 --- a/lib/ansible/constants.py +++ b/lib/ansible/constants.py @@ -261,6 +261,7 @@ GALAXY_SCMS = get_config(p, 'galaxy', 'scms', 'ANSIBLE_GALAXY # characters included in auto-generated passwords DEFAULT_PASSWORD_CHARS = ascii_letters + digits + ".,:-_" +STRING_TYPE_FILTERS = get_config(p, 'jinja2', 'dont_type_filters', 'ANSIBLE_STRING_TYPE_FILTERS', ['string', 'to_json', 'to_nice_json', 'to_yaml', 'ppretty', 'json'], islist=True ) # non-configurable things 
MODULE_REQUIRE_ARGS = ['command', 'shell', 'raw', 'script'] diff --git a/lib/ansible/template/__init__.py b/lib/ansible/template/__init__.py index bdd0612bddd..8ce2358eb1e 100644 --- a/lib/ansible/template/__init__.py +++ b/lib/ansible/template/__init__.py @@ -164,7 +164,8 @@ class Templar: self.block_end = self.environment.block_end_string self.variable_start = self.environment.variable_start_string self.variable_end = self.environment.variable_end_string - self._clean_regex = re.compile(r'(?:%s[%s%s]|[%s%s]%s)' % (self.variable_start[0], self.variable_start[1], self.block_start[1], self.block_end[0], self.variable_end[0], self.variable_end[1])) + self._clean_regex = re.compile(r'(?:%s|%s|%s|%s)' % (self.variable_start, self.block_start, self.block_end, self.variable_end)) + self._no_type_regex = re.compile(r'.*\|(?:%s)\s*(?:%s)?$' % ('|'.join(C.STRING_TYPE_FILTERS), self.variable_end)) def _get_filters(self): ''' @@ -278,8 +279,7 @@ class Templar: if fail_on_undefined is None: fail_on_undefined = self._fail_on_undefined_errors - # Don't template unsafe variables, instead drop them back down to - # their constituent type. + # Don't template unsafe variables, instead drop them back down to their constituent type. if hasattr(variable, '__UNSAFE__'): if isinstance(variable, text_type): return self._clean_data(text_type(variable)) @@ -294,6 +294,7 @@ class Templar: if isinstance(variable, string_types): result = variable + if self._contains_vars(variable): # Check to see if the string we are trying to render is just referencing a single @@ -319,7 +320,7 @@ class Templar: result = self._cached_result[sha1_hash] else: result = self._do_template(variable, preserve_trailing_newlines=preserve_trailing_newlines, escape_backslashes=escape_backslashes, fail_on_undefined=fail_on_undefined, overrides=overrides) - if convert_data: + if convert_data and not self._no_type_regex.match(variable): # if this looks like a dictionary or list, convert it to such using the safe_eval method if (result.startswith("{") and not result.startswith(self.environment.variable_start_string)) or \ result.startswith("[") or result in ("True", "False"): From d82d65ee7bd2506e06ffb225a2e9be6fa1ac36db Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Thu, 12 Nov 2015 18:42:39 -0800 Subject: [PATCH 0056/1113] keep string type filters as strings now we don't try to convert types if using a filter that outputs a specifically formated string made list of filters configurable --- lib/ansible/constants.py | 1 + lib/ansible/template/__init__.py | 9 +++++---- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py index 6faae928dbe..0f809db7297 100644 --- a/lib/ansible/constants.py +++ b/lib/ansible/constants.py @@ -261,6 +261,7 @@ GALAXY_SCMS = get_config(p, 'galaxy', 'scms', 'ANSIBLE_GALAXY # characters included in auto-generated passwords DEFAULT_PASSWORD_CHARS = ascii_letters + digits + ".,:-_" +STRING_TYPE_FILTERS = get_config(p, 'jinja2', 'dont_type_filters', 'ANSIBLE_STRING_TYPE_FILTERS', ['string', 'to_json', 'to_nice_json', 'to_yaml', 'ppretty', 'json'], islist=True ) # non-configurable things MODULE_REQUIRE_ARGS = ['command', 'shell', 'raw', 'script'] diff --git a/lib/ansible/template/__init__.py b/lib/ansible/template/__init__.py index bdd0612bddd..8ce2358eb1e 100644 --- a/lib/ansible/template/__init__.py +++ b/lib/ansible/template/__init__.py @@ -164,7 +164,8 @@ class Templar: self.block_end = self.environment.block_end_string self.variable_start = 
self.environment.variable_start_string self.variable_end = self.environment.variable_end_string - self._clean_regex = re.compile(r'(?:%s[%s%s]|[%s%s]%s)' % (self.variable_start[0], self.variable_start[1], self.block_start[1], self.block_end[0], self.variable_end[0], self.variable_end[1])) + self._clean_regex = re.compile(r'(?:%s|%s|%s|%s)' % (self.variable_start, self.block_start, self.block_end, self.variable_end)) + self._no_type_regex = re.compile(r'.*\|(?:%s)\s*(?:%s)?$' % ('|'.join(C.STRING_TYPE_FILTERS), self.variable_end)) def _get_filters(self): ''' @@ -278,8 +279,7 @@ class Templar: if fail_on_undefined is None: fail_on_undefined = self._fail_on_undefined_errors - # Don't template unsafe variables, instead drop them back down to - # their constituent type. + # Don't template unsafe variables, instead drop them back down to their constituent type. if hasattr(variable, '__UNSAFE__'): if isinstance(variable, text_type): return self._clean_data(text_type(variable)) @@ -294,6 +294,7 @@ class Templar: if isinstance(variable, string_types): result = variable + if self._contains_vars(variable): # Check to see if the string we are trying to render is just referencing a single @@ -319,7 +320,7 @@ class Templar: result = self._cached_result[sha1_hash] else: result = self._do_template(variable, preserve_trailing_newlines=preserve_trailing_newlines, escape_backslashes=escape_backslashes, fail_on_undefined=fail_on_undefined, overrides=overrides) - if convert_data: + if convert_data and not self._no_type_regex.match(variable): # if this looks like a dictionary or list, convert it to such using the safe_eval method if (result.startswith("{") and not result.startswith(self.environment.variable_start_string)) or \ result.startswith("[") or result in ("True", "False"): From c1cec64aa8372f2e7d565a2717c68a075836ae9b Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Tue, 8 Dec 2015 14:18:11 -0800 Subject: [PATCH 0057/1113] added delegate_facts docs --- docsite/rst/playbooks_delegation.rst | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/docsite/rst/playbooks_delegation.rst b/docsite/rst/playbooks_delegation.rst index 4411e4aa29f..4e2e8c372ac 100644 --- a/docsite/rst/playbooks_delegation.rst +++ b/docsite/rst/playbooks_delegation.rst @@ -130,6 +130,29 @@ Here is an example:: Note that you must have passphrase-less SSH keys or an ssh-agent configured for this to work, otherwise rsync will need to ask for a passphrase. +.. _delegate_facts: + +Delegated facts +``````````````` + +.. versionadded:: 2.0 + +Before 2.0, any facts gathered by a delegated task were assigned to the `inventory_hostname` (the current host) instead of the host which actually produced the facts (the delegated-to host). +The new directive `delegate_facts`, if set to `True`, will assign the task's gathered facts to the delegated host instead of the current one:: + + + - hosts: app_servers + tasks: + - name: gather facts from db servers + setup: + delegate_to: "{{item}}" + delegate_facts: True + with_items: "{{groups['dbservers']}}" + +The above will gather facts for the machines in the dbservers group and assign the facts to those machines and not to app_servers; +that way you can look up `hostvars['dbhost1']['default_ipv4_addresses'][0]` even though dbservers were not part of the play, or were left out by using `--limit`. + + ..
_run_once: Run Once From ea72fd65474d52936523c9cb3c12c3827f8438f1 Mon Sep 17 00:00:00 2001 From: = Date: Wed, 9 Dec 2015 08:57:06 +0000 Subject: [PATCH 0058/1113] adding integration tests for win_regmerge module (extras) --- .../test_win_regmerge/files/settings1.reg | Bin 0 -> 374 bytes .../test_win_regmerge/files/settings2.reg | Bin 0 -> 760 bytes .../test_win_regmerge/files/settings3.reg | Bin 0 -> 1926 bytes .../roles/test_win_regmerge/meta/main.yml | 3 + .../roles/test_win_regmerge/tasks/main.yml | 133 ++++++++++++++++++ .../templates/win_line_ending.j2 | 4 + .../roles/test_win_regmerge/vars/main.yml | 1 + test/integration/test_winrm.yml | 1 + 8 files changed, 142 insertions(+) create mode 100644 test/integration/roles/test_win_regmerge/files/settings1.reg create mode 100644 test/integration/roles/test_win_regmerge/files/settings2.reg create mode 100644 test/integration/roles/test_win_regmerge/files/settings3.reg create mode 100644 test/integration/roles/test_win_regmerge/meta/main.yml create mode 100644 test/integration/roles/test_win_regmerge/tasks/main.yml create mode 100644 test/integration/roles/test_win_regmerge/templates/win_line_ending.j2 create mode 100644 test/integration/roles/test_win_regmerge/vars/main.yml diff --git a/test/integration/roles/test_win_regmerge/files/settings1.reg b/test/integration/roles/test_win_regmerge/files/settings1.reg new file mode 100644 index 0000000000000000000000000000000000000000..baec75b2af0ee7f806f7e51fc362da820d60e4f6 GIT binary patch literal 374 zcmZXQO-sX25Jk^g@IQpCbR*(N5y6E-gF&lOt3j-kQj3JvL{d@v=hc(Zojiu&e!O{i z-uG8YMa>fpA1p~2FymQn$r~*znN!tD)QA)A)LYd`T#NVFV%xLMTGRt)oO|bMLFPxxU)%PVB9Y>EBi>QjV;QL+6dSR&DgPOn7m}T>nCU_dgqaazKyG XaQ@HMrJF?ZTeIfQSp;gspGKY^4^lt~ literal 0 HcmV?d00001 diff --git a/test/integration/roles/test_win_regmerge/files/settings2.reg b/test/integration/roles/test_win_regmerge/files/settings2.reg new file mode 100644 index 0000000000000000000000000000000000000000..fc2612cb8a8a3937b72e36400b434bac8753e02f GIT binary patch literal 760 zcma))TT4Pw5QW!s(0@4iEaB}a1UQ|vi*tOJ!8``YvQpfg=? z=U#nXs;xxF?0vP^6MW~o!uzN$zEj<(2i|d{=Njs&sj3Q58F^D86UQUpbG?EzJ@*o} zf!>KRX4Buop6khV6o^VS0(sL5>O(}Aimb9!GZl2C38zmTqQ9;pv&`Bcdy$#?YUj15 z?om}(&0k`TQETTa>$(funXl21qrv*OnKNXt%q8b6>)APtEp{yd*~NTIhpb1Nyg^^Q zQW@AyW$Y>|t4srshSyW`Of|4KM?vA1Dpp)32BqOvpm0(fXeH|+etI^ZkiyQE*(Gpqx?6TVmV zQJ)wewBYkdRqfz&sAK(9LuGBU9;?n;##YlV)>ut!TWYDz{=VMo_P8zLR*6&R{z%`LWo_Ueb7nx6C+d*bSAD_q z_V{C}(V?%X+RTtBlR}`kq6d4H^p_{{8o$SO+j*{}T3(^grOOqR`a4)hyn7xKt{f literal 0 HcmV?d00001 diff --git a/test/integration/roles/test_win_regmerge/meta/main.yml b/test/integration/roles/test_win_regmerge/meta/main.yml new file mode 100644 index 00000000000..55200b3fc64 --- /dev/null +++ b/test/integration/roles/test_win_regmerge/meta/main.yml @@ -0,0 +1,3 @@ +dependencies: + - prepare_win_tests + diff --git a/test/integration/roles/test_win_regmerge/tasks/main.yml b/test/integration/roles/test_win_regmerge/tasks/main.yml new file mode 100644 index 00000000000..6e64c9dd4a7 --- /dev/null +++ b/test/integration/roles/test_win_regmerge/tasks/main.yml @@ -0,0 +1,133 @@ +# test code for the win_regmerge module +# (c) 2014, Michael DeHaan + +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see . + +# clear the area of the registry we are using for tests +- name: remove setting + win_regedit: + key: 'HKLM:\SOFTWARE\Wow6432Node\Cow Corp' + state: absent + +# copy over some registry files to work with +- name: copy over some registry files to work with + win_copy: src={{item}} dest={{win_output_dir}}\\{{item}} + with_items: + - settings1.reg + - settings2.reg + - settings3.reg + +# test 1 - basic test of changed behaviour +# merge in REG_SZ +- name: test 1 merge in a setting + win_regmerge: + path: "{{win_output_dir}}\\settings1.reg" + register: merge11_result + +- assert: + that: + - "merge11_result.changed == true" + +# re run the merge +- name: test 1 merge in the setting again + win_regmerge: + path: "{{win_output_dir}}\\settings1.reg" + register: merge12_result + +# without a compare to key, should allways report changed +- assert: + that: + - "merge12_result.changed == true" +# assert changed false + +# prune reg key +- name: test 1 remove setting + win_regedit: + key: 'HKLM:\SOFTWARE\Wow6432Node\Cow Corp' + state: absent + +# +# test 2, observe behaviour when compare_to param is set +# +- name: test 2 merge in a setting + win_regmerge: + path: "{{win_output_dir}}\\settings1.reg" + compare_to: 'HKLM:\SOFTWARE\Wow6432Node\Cow Corp\Moosic\ILikeToMooveIt' + register: merge21_result + +- assert: + that: + - "merge21_result.changed == true" + +# re run the merge +- name: test 2 merge in the setting again but with compare_key + win_regmerge: + path: "{{win_output_dir}}\\settings1.reg" + compare_to: 'HKLM:\SOFTWARE\Wow6432Node\Cow Corp\Moosic\ILikeToMooveIt' + register: merge22_result + +# with a compare to key, should now report not changed +- assert: + that: + - "merge22_result.changed == false" +# assert changed false + +# prune the contents of the registry from the parent of the compare key downwards +- name: test 2 clean up remove setting + win_regedit: + key: 'HKLM:\SOFTWARE\Wow6432Node\Cow Corp' + state: absent + +# test 3 merge in more complex settings +- name: test 3 merge in a setting + win_regmerge: + path: "{{win_output_dir}}\\settings3.reg" + compare_to: 'HKLM:\SOFTWARE\Wow6432Node\Cow Corp\Moo Monitor' + register: merge31_result + +- assert: + that: + - "merge31_result.changed == true" + +# re run the merge +- name: test 3 merge in the setting again but with compare_key check + win_regmerge: + path: "{{win_output_dir}}\\settings3.reg" + compare_to: 'HKLM:\SOFTWARE\Wow6432Node\Cow Corp\Moo Monitor' + register: merge32_result + +# with a compare to key, should now report not changed +- assert: + that: + - "merge32_result.changed == false" +# assert changed false + +# prune the contents of the registry from the compare key downwards +- name: test 3 clean up remove setting + win_regedit: + key: 'HKLM:\SOFTWARE\Wow6432Node\Cow Corp' + state: absent + +# clean up registry files + +- name: clean up registry files + win_file: path={{win_output_dir}}\\{{item}} state=absent + with_items: + - settings1.reg + - settings2.reg + - settings3.reg + +# END OF win_regmerge tests diff --git a/test/integration/roles/test_win_regmerge/templates/win_line_ending.j2 b/test/integration/roles/test_win_regmerge/templates/win_line_ending.j2 new file 
mode 100644 index 00000000000..d0cefd76f49 --- /dev/null +++ b/test/integration/roles/test_win_regmerge/templates/win_line_ending.j2 @@ -0,0 +1,4 @@ +#jinja2: newline_sequence:'\r\n' +{{ templated_var }} +{{ templated_var }} +{{ templated_var }} diff --git a/test/integration/roles/test_win_regmerge/vars/main.yml b/test/integration/roles/test_win_regmerge/vars/main.yml new file mode 100644 index 00000000000..1e8f64ccf44 --- /dev/null +++ b/test/integration/roles/test_win_regmerge/vars/main.yml @@ -0,0 +1 @@ +templated_var: templated_var_loaded diff --git a/test/integration/test_winrm.yml b/test/integration/test_winrm.yml index f11171faf8c..51a5daa51fb 100644 --- a/test/integration/test_winrm.yml +++ b/test/integration/test_winrm.yml @@ -37,4 +37,5 @@ - { role: test_win_copy, tags: test_win_copy } - { role: test_win_template, tags: test_win_template } - { role: test_win_lineinfile, tags: test_win_lineinfile } + - { role: test_win_regmerge, tags: test_win_regmerge } From 57391f49ba5e7692e50e4e43ed9c541511eb0936 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Wed, 9 Dec 2015 07:52:43 -0500 Subject: [PATCH 0059/1113] removed ansible_python_interpreter * added missed renames of ansible_deps to ansible_test_deps * removed acidential inventory.dynamic file * modified README for ansible_test_deps role --- .../ansible-playbook_integration_runner/inventory | 2 +- .../inventory.dynamic | 3 --- .../utils/ansible-playbook_integration_runner/main.yml | 2 +- .../roles/ansible_test_deps/README.md | 6 ++---- .../roles/ansible_test_deps/test/main.yml | 10 +++++----- 5 files changed, 9 insertions(+), 14 deletions(-) delete mode 100644 test/utils/ansible-playbook_integration_runner/inventory.dynamic diff --git a/test/utils/ansible-playbook_integration_runner/inventory b/test/utils/ansible-playbook_integration_runner/inventory index 42de3a1b5d7..2302edae31b 100644 --- a/test/utils/ansible-playbook_integration_runner/inventory +++ b/test/utils/ansible-playbook_integration_runner/inventory @@ -1 +1 @@ -localhost ansible_connection=local ansible_python_interpreter="/usr/bin/env python" +localhost ansible_connection=local diff --git a/test/utils/ansible-playbook_integration_runner/inventory.dynamic b/test/utils/ansible-playbook_integration_runner/inventory.dynamic deleted file mode 100644 index 1aa03b4ed8d..00000000000 --- a/test/utils/ansible-playbook_integration_runner/inventory.dynamic +++ /dev/null @@ -1,3 +0,0 @@ -localhost ansible_connection=local ansible_python_interpreter="/usr/bin/env python" -[dynamic_hosts] -54.157.26.110 ansible_ssh_user=root ansible_ssh_private_key_file=/Users/meyers/Dropbox/.ssh/Ansible_chris_meyers.pem diff --git a/test/utils/ansible-playbook_integration_runner/main.yml b/test/utils/ansible-playbook_integration_runner/main.yml index 8661a6dba9e..5d15541490f 100644 --- a/test/utils/ansible-playbook_integration_runner/main.yml +++ b/test/utils/ansible-playbook_integration_runner/main.yml @@ -36,7 +36,7 @@ make_target: "non_destructive" #pre_tasks: roles: - - { role: ansible_deps, tags: ansible_deps } + - { role: ansible_test_deps, tags: ansible_test_deps } - { role: run_integration, tags: run_integration, run_integration_test_flags: "{{ test_flags }}", diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/README.md b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/README.md index f0fc755863c..09ffacacaf5 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/README.md +++ 
b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/README.md @@ -1,8 +1,6 @@ -[![Build Status](https://travis-ci.org/chrismeyersfsu/role-ansible_deps.svg)](https://travis-ci.org/chrismeyersfsu/role-ansible_deps) +[![Build Status](https://travis-ci.org/chrismeyersfsu/ansible_test_deps.svg)](https://travis-ci.org/chrismeyersfsu/ansible_test_deps) -ansible_deps +ansible_test_deps ========= Install needed packages to run ansible integration tests. - -This role is periodically synced from ansible core repo to chrismeyersfsu/role-ansible_deps so that automated tests may run and so this role is accessible from galaxy. diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/test/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/test/main.yml index 95617dbfac3..b66d699d5d6 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/test/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/test/main.yml @@ -4,21 +4,21 @@ gather_facts: false vars: inventory: - - name: ansible_deps_host_1 + - name: ansible_test_deps_host_1 image: "chrismeyers/centos6" - - name: ansible_deps_host_2 + - name: ansible_test_deps_host_2 image: "chrismeyers/ubuntu12.04" - - name: ansible_deps_host_3 + - name: ansible_test_deps_host_3 image: "ubuntu-upstart:14.04" roles: - { role: provision_docker, provision_docker_company: 'ansible', provision_docker_inventory: "{{ inventory }}" } -- name: Run ansible_deps Tests +- name: Run ansible_test_deps Tests hosts: docker_containers vars: git_dir: "/tmp/ansible" roles: - - { role: ansible_deps } + - { role: ansible_test_deps } tasks: - name: Clone ansible git: From f16628ffecfa5ece0535c9b1c3de78cc78e18575 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Wed, 9 Dec 2015 09:37:39 -0500 Subject: [PATCH 0060/1113] symbolic link role for testing --- .../roles/ansible_test_deps/test/roles/ansible_test_deps | 1 + 1 file changed, 1 insertion(+) create mode 120000 test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/test/roles/ansible_test_deps diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/test/roles/ansible_test_deps b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/test/roles/ansible_test_deps new file mode 120000 index 00000000000..eb6d9edda4b --- /dev/null +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/test/roles/ansible_test_deps @@ -0,0 +1 @@ +../../../ansible_test_deps \ No newline at end of file From 8d66dcda21f176ee7cce21e99f52dea384ef42b8 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Wed, 9 Dec 2015 09:39:45 -0500 Subject: [PATCH 0061/1113] remove .gitignore --- .../roles/ansible_test_deps/.gitignore | 1 - 1 file changed, 1 deletion(-) delete mode 100644 test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/.gitignore diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/.gitignore b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/.gitignore deleted file mode 100644 index 1377554ebea..00000000000 --- a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.swp From 0719eb3e2d798c6f80223e37dd77bc0ac41c537d Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Wed, 9 Dec 2015 06:32:04 -0800 Subject: [PATCH 0062/1113] clarified warning from tree callback --- lib/ansible/plugins/callback/tree.py | 2 +- 1 file changed, 
1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/plugins/callback/tree.py b/lib/ansible/plugins/callback/tree.py index b6ecd6de878..ee710a6dfdf 100644 --- a/lib/ansible/plugins/callback/tree.py +++ b/lib/ansible/plugins/callback/tree.py @@ -42,7 +42,7 @@ class CallbackModule(CallbackBase): self.tree = TREE_DIR if not self.tree: self.tree = os.path.expanduser("~/.ansible/tree") - self._display.warning("Defaulting to ~/.ansible/tree, invalid directory provided to tree option: %s" % self.tree) + self._display.warning("The tree callback is defaulting to ~/.ansible/tree, as an invalid directory was provided: %s" % self.tree) def write_tree_file(self, hostname, buf): ''' write something into treedir/hostname ''' From 87969868d42cd8aba1c65c8207d059d73407373b Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Wed, 9 Dec 2015 07:21:00 -0800 Subject: [PATCH 0063/1113] avoid persistent containers in attribute defaults Moved the container defaults out of the field attribute declarations and created a placeholder which is then resolved in the field attribute class. This is to avoid unwanted persistence of the defaults across objects, which introduces stealth bugs when multiple objects of the same kind are used in succession without overriding the default values. --- lib/ansible/playbook/attribute.py | 11 +++++++++++ lib/ansible/playbook/block.py | 6 +++--- lib/ansible/playbook/conditional.py | 2 +- lib/ansible/playbook/play.py | 16 ++++++++-------- lib/ansible/playbook/play_context.py | 4 ++-- lib/ansible/playbook/playbook_include.py | 2 +- lib/ansible/playbook/role/metadata.py | 2 +- lib/ansible/playbook/taggable.py | 2 +- lib/ansible/playbook/task.py | 2 +- 9 files changed, 29 insertions(+), 18 deletions(-) diff --git a/lib/ansible/playbook/attribute.py b/lib/ansible/playbook/attribute.py index 703d9dbca1e..ce7ed6d8fe7 100644 --- a/lib/ansible/playbook/attribute.py +++ b/lib/ansible/playbook/attribute.py @@ -32,6 +32,17 @@ class Attribute: self.priority = priority self.always_post_validate = always_post_validate + # This is here to avoid unwanted persistence of `default=` containers across object instances + # We cannot rely on None as some fields use it to skip the code + # that would detect an empty container as a user error + if self.default == '_ansible_container': + if self.isa == 'list': + self.default = [] + elif self.isa == 'dict': + self.default = {} + elif self.isa == 'set': + self.default = set() + def __eq__(self, other): return other.priority == self.priority diff --git a/lib/ansible/playbook/block.py b/lib/ansible/playbook/block.py index f2d9c82833a..66009b028af 100644 --- a/lib/ansible/playbook/block.py +++ b/lib/ansible/playbook/block.py @@ -30,9 +30,9 @@ from ansible.playbook.taggable import Taggable class Block(Base, Become, Conditional, Taggable): - _block = FieldAttribute(isa='list', default=[]) - _rescue = FieldAttribute(isa='list', default=[]) - _always = FieldAttribute(isa='list', default=[]) + _block = FieldAttribute(isa='list', default='_ansible_container') + _rescue = FieldAttribute(isa='list', default='_ansible_container') + _always = FieldAttribute(isa='list', default='_ansible_container') _delegate_to = FieldAttribute(isa='list') _delegate_facts = FieldAttribute(isa='bool', default=False) diff --git a/lib/ansible/playbook/conditional.py b/lib/ansible/playbook/conditional.py index fc178e2fa1d..a5b3ca725f8 100--- a/lib/ansible/playbook/conditional.py +++ b/lib/ansible/playbook/conditional.py @@ -33,7 +33,7 @@ class Conditional: to be run conditionally when a condition is met or skipped.
''' - _when = FieldAttribute(isa='list', default=[]) + _when = FieldAttribute(isa='list', default='_ansible_container') def __init__(self, loader=None): # when used directly, this class needs a loader, but we want to diff --git a/lib/ansible/playbook/play.py b/lib/ansible/playbook/play.py index ed61416e951..e08c8c60016 100644 --- a/lib/ansible/playbook/play.py +++ b/lib/ansible/playbook/play.py @@ -64,22 +64,22 @@ class Play(Base, Taggable, Become): # Connection _gather_facts = FieldAttribute(isa='bool', default=None, always_post_validate=True) - _hosts = FieldAttribute(isa='list', default=[], required=True, listof=string_types, always_post_validate=True) + _hosts = FieldAttribute(isa='list', default='_ansible_container', required=True, listof=string_types, always_post_validate=True) _name = FieldAttribute(isa='string', default='', always_post_validate=True) # Variable Attributes - _vars_files = FieldAttribute(isa='list', default=[], priority=99) - _vars_prompt = FieldAttribute(isa='list', default=[], always_post_validate=True) + _vars_files = FieldAttribute(isa='list', default='_ansible_container', priority=99) + _vars_prompt = FieldAttribute(isa='list', default='_ansible_container', always_post_validate=True) _vault_password = FieldAttribute(isa='string', always_post_validate=True) # Role Attributes - _roles = FieldAttribute(isa='list', default=[], priority=90) + _roles = FieldAttribute(isa='list', default='_ansible_container', priority=90) # Block (Task) Lists Attributes - _handlers = FieldAttribute(isa='list', default=[]) - _pre_tasks = FieldAttribute(isa='list', default=[]) - _post_tasks = FieldAttribute(isa='list', default=[]) - _tasks = FieldAttribute(isa='list', default=[]) + _handlers = FieldAttribute(isa='list', default='_ansible_container') + _pre_tasks = FieldAttribute(isa='list', default='_ansible_container') + _post_tasks = FieldAttribute(isa='list', default='_ansible_container') + _tasks = FieldAttribute(isa='list', default='_ansible_container') # Flag/Setting Attributes _any_errors_fatal = FieldAttribute(isa='bool', default=False, always_post_validate=True) diff --git a/lib/ansible/playbook/play_context.py b/lib/ansible/playbook/play_context.py index 81223500adf..da291c3c834 100644 --- a/lib/ansible/playbook/play_context.py +++ b/lib/ansible/playbook/play_context.py @@ -171,8 +171,8 @@ class PlayContext(Base): # general flags _verbosity = FieldAttribute(isa='int', default=0) - _only_tags = FieldAttribute(isa='set', default=set()) - _skip_tags = FieldAttribute(isa='set', default=set()) + _only_tags = FieldAttribute(isa='set', default='_ansible_container') + _skip_tags = FieldAttribute(isa='set', default='_ansible_container') _check_mode = FieldAttribute(isa='bool', default=False) _force_handlers = FieldAttribute(isa='bool', default=False) _start_at_task = FieldAttribute(isa='string') diff --git a/lib/ansible/playbook/playbook_include.py b/lib/ansible/playbook/playbook_include.py index d9af2ba5237..52081c41539 100644 --- a/lib/ansible/playbook/playbook_include.py +++ b/lib/ansible/playbook/playbook_include.py @@ -35,7 +35,7 @@ class PlaybookInclude(Base, Conditional, Taggable): _name = FieldAttribute(isa='string') _include = FieldAttribute(isa='string') - _vars = FieldAttribute(isa='dict', default=dict()) + _vars = FieldAttribute(isa='dict', default='_ansible_container') @staticmethod def load(data, basedir, variable_manager=None, loader=None): diff --git a/lib/ansible/playbook/role/metadata.py b/lib/ansible/playbook/role/metadata.py index 58b59145a1c..4bb7d0ce02b 100644 --- 
a/lib/ansible/playbook/role/metadata.py +++ b/lib/ansible/playbook/role/metadata.py @@ -40,7 +40,7 @@ class RoleMetadata(Base): ''' _allow_duplicates = FieldAttribute(isa='bool', default=False) - _dependencies = FieldAttribute(isa='list', default=[]) + _dependencies = FieldAttribute(isa='list', default='_ansible_container') _galaxy_info = FieldAttribute(isa='GalaxyInfo') def __init__(self, owner=None): diff --git a/lib/ansible/playbook/taggable.py b/lib/ansible/playbook/taggable.py index 8f5cfa09344..37e3261e80d 100644 --- a/lib/ansible/playbook/taggable.py +++ b/lib/ansible/playbook/taggable.py @@ -29,7 +29,7 @@ from ansible.template import Templar class Taggable: untagged = frozenset(['untagged']) - _tags = FieldAttribute(isa='list', default=[], listof=(string_types,int)) + _tags = FieldAttribute(isa='list', default='_ansible_container', listof=(string_types,int)) def __init__(self): super(Taggable, self).__init__() diff --git a/lib/ansible/playbook/task.py b/lib/ansible/playbook/task.py index 17f1952e39c..53a9a3c3931 100644 --- a/lib/ansible/playbook/task.py +++ b/lib/ansible/playbook/task.py @@ -64,7 +64,7 @@ class Task(Base, Conditional, Taggable, Become): # will be used if defined # might be possible to define others - _args = FieldAttribute(isa='dict', default=dict()) + _args = FieldAttribute(isa='dict', default='_ansible_container') _action = FieldAttribute(isa='string') _any_errors_fatal = FieldAttribute(isa='bool') From 4f84769a17bb92894ee31b08267cf9aec1c0118c Mon Sep 17 00:00:00 2001 From: chouseknecht Date: Wed, 9 Dec 2015 10:51:12 -0500 Subject: [PATCH 0064/1113] Galaxy 2.0 --- docsite/rst/galaxy.rst | 291 ++++++++++++++++- lib/ansible/cli/galaxy.py | 326 ++++++++++++++++--- lib/ansible/constants.py | 3 +- lib/ansible/galaxy/__init__.py | 2 + lib/ansible/galaxy/api.py | 207 ++++++++---- lib/ansible/galaxy/data/metadata_template.j2 | 14 + lib/ansible/galaxy/data/test_playbook.j2 | 5 + lib/ansible/galaxy/data/travis.j2 | 29 ++ lib/ansible/galaxy/login.py | 113 +++++++ lib/ansible/galaxy/role.py | 10 +- lib/ansible/galaxy/token.py | 67 ++++ 11 files changed, 952 insertions(+), 115 deletions(-) create mode 100644 lib/ansible/galaxy/data/test_playbook.j2 create mode 100644 lib/ansible/galaxy/data/travis.j2 create mode 100644 lib/ansible/galaxy/login.py create mode 100644 lib/ansible/galaxy/token.py diff --git a/docsite/rst/galaxy.rst b/docsite/rst/galaxy.rst index 1b9475c418d..783ac15e456 100644 --- a/docsite/rst/galaxy.rst +++ b/docsite/rst/galaxy.rst @@ -8,7 +8,7 @@ Ansible Galaxy The Website ``````````` -The website `Ansible Galaxy `_, is a free site for finding, downloading, rating, and reviewing all kinds of community developed Ansible roles and can be a great way to get a jumpstart on your automation projects. +The website `Ansible Galaxy `_, is a free site for finding, downloading, and sharing community developed Ansible roles. Downloading roles from Galaxy is a great way to jumpstart your automation projects. You can sign up with social auth and use the download client 'ansible-galaxy' which is included in Ansible 1.4.2 and later. @@ -24,7 +24,7 @@ Installing Roles The most obvious is downloading roles from the Ansible Galaxy website:: - ansible-galaxy install username.rolename + $ ansible-galaxy install username.rolename .. 
_galaxy_cli_roles_path: @@ -33,23 +33,16 @@ roles_path You can specify a particular directory where you want the downloaded roles to be placed:: - ansible-galaxy install username.role -p ~/Code/ansible_roles/ + $ ansible-galaxy install username.role -p ~/Code/ansible_roles/ This can be useful if you have a master folder that contains ansible galaxy roles shared across several projects. The default is the roles_path configured in your ansible.cfg file (/etc/ansible/roles if not configured). -Building out Role Scaffolding ------------------------------ - -It can also be used to initialize the base structure of a new role, saving time on creating the various directories and main.yml files a role requires:: - - ansible-galaxy init rolename - Installing Multiple Roles From A File -------------------------------------- +===================================== To install multiple roles, the ansible-galaxy CLI can be fed a requirements file. All versions of ansible allow the following syntax for installing roles from the Ansible Galaxy website:: - ansible-galaxy install -r requirements.txt + $ ansible-galaxy install -r requirements.txt Where the requirements.txt looks like:: @@ -64,7 +57,7 @@ To request specific versions (tags) of a role, use this syntax in the roles file Available versions will be listed on the Ansible Galaxy webpage for that role. Advanced Control over Role Requirements Files ---------------------------------------------- +============================================= For more advanced control over where to download roles from, including support for remote repositories, Ansible 1.8 and later support a new YAML format for the role requirements file, which must end in a 'yml' extension. It works like this:: @@ -121,3 +114,275 @@ Roles pulled from galaxy work as with other SCM sourced roles above. To download `irc.freenode.net `_ #ansible IRC chat channel +Building Role Scaffolding +------------------------- + +Use the init command to initialize the base structure of a new role, saving time on creating the various directories and main.yml files a role requires:: + + $ ansible-galaxy init rolename + +The above will create the following directory structure in the current working directory: + +:: + + README.md + .travis.yml + defaults/ + main.yml + files/ + handlers/ + main.yml + meta/ + main.yml + templates/ + tests/ + inventory + test.yml + vars/ + main.yml + +.. note:: + + .travis.yml and tests/ are new in Ansible 2.0 + +If a directory matching the name of the role already exists in the current working directory, the init command will result in an error. To ignore the error, use the --force option. Force will create the above subdirectories and files, replacing anything that matches. + +Search for Roles +---------------- + +The search command provides for querying the Galaxy database, allowing for searching by tags, platforms, author and multiple keywords. For example: + +:: + + $ ansible-galaxy search elasticsearch --author geerlingguy + +The search command will return a list of the first 1000 results matching your search: + +:: + + Found 2 roles matching your search: + + Name Description + ---- ----------- + geerlingguy.elasticsearch Elasticsearch for Linux. + geerlingguy.elasticsearch-curator Elasticsearch curator for Linux. + +.. note:: + + The format of results pictured here is new in Ansible 2.0.
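+ +Search terms and filters can also be combined in a single query. The following example is illustrative only (the keyword, tag, platform and author values shown are placeholders, not recommendations):: + + $ ansible-galaxy search nginx --galaxy-tags web --platforms EL --author someuser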
+ +Get More Information About a Role +--------------------------------- + +Use the info command to view more detail about a specific role: + +:: + + $ ansible-galaxy info username.role_name + +This returns everything found in Galaxy for the role: + +:: + + Role: username.rolename + description: Installs and configures a thing, a distributed, highly available NoSQL thing. + active: True + commit: c01947b7bc89ebc0b8a2e298b87ab416aed9dd57 + commit_message: Adding travis + commit_url: https://github.com/username/repo_name/commit/c01947b7bc89ebc0b8a2e298b87ab + company: My Company, Inc. + created: 2015-12-08T14:17:52.773Z + download_count: 1 + forks_count: 0 + github_branch: + github_repo: repo_name + github_user: username + id: 6381 + is_valid: True + issue_tracker_url: + license: Apache + min_ansible_version: 1.4 + modified: 2015-12-08T18:43:49.085Z + namespace: username + open_issues_count: 0 + path: /Users/username/projects/roles + scm: None + src: username.repo_name + stargazers_count: 0 + travis_status_url: https://travis-ci.org/username/repo_name.svg?branch=master + version: + watchers_count: 1 + +.. note:: + + The format of results pictured here is new in Ansible 2.0. + + +List Installed Roles +-------------------- + +The list command shows the name and version of each role installed in roles_path. + +:: + + $ ansible-galaxy list + + - chouseknecht.role-install_mongod, master + - chouseknecht.test-role-1, v1.0.2 + - chrismeyersfsu.role-iptables, master + - chrismeyersfsu.role-required_vars, master + +Remove an Installed Role +------------------------ + +The remove command will delete a role from roles_path: + +:: + + $ ansible-galaxy remove username.rolename + +Authenticate with Galaxy +------------------------ + +To use the import, delete and setup commands, authentication with Galaxy is required. The login command will authenticate the user, retrieve a token from Galaxy, and store it in the user's home directory. + +:: + + $ ansible-galaxy login + + We need your Github login to identify you. + This information will not be sent to Galaxy, only to api.github.com. + The password will not be displayed. + + Use --github-token if you do not want to enter your password. + + Github Username: dsmith + Password for dsmith: + Succesfully logged into Galaxy as dsmith + +As depicted above, the login command prompts for a GitHub username and password. It does NOT send your password to Galaxy. It actually authenticates with GitHub and creates a personal access token. It then sends the personal access token to Galaxy, which in turn verifies that you are you and returns a Galaxy access token. After authentication completes, the GitHub personal access token is destroyed. + +If you do not wish to use your GitHub password, or if you have two-factor authentication enabled with GitHub, use the --github-token option to pass a personal access token that you create. Log into GitHub, go to Settings and click on Personal Access Token to create a token. + +Import a Role +------------- + +Roles can be imported using ansible-galaxy. The import command expects that the user previously authenticated with Galaxy using the login command.
+ +Import any GitHub repo you have access to: + +:: + + $ ansible-galaxy import github_user github_repo + +By default, the command will wait for the role to be imported by Galaxy, displaying the results as the import progresses: + +:: + + Successfully submitted import request 41 + Starting import 41: role_name=myrole repo=githubuser/ansible-role-repo ref= + Retrieving Github repo githubuser/ansible-role-repo + Accessing branch: master + Parsing and validating meta/main.yml + Parsing galaxy_tags + Parsing platforms + Adding dependencies + Parsing and validating README.md + Adding repo tags as role versions + Import completed + Status SUCCESS : warnings=0 errors=0 + +Use the --branch option to import a specific branch. If not specified, the default branch for the repo will be used. + +If the --no-wait option is present, the command will not wait for results. Results of the most recent import for any of your roles are available on the Galaxy web site under My Imports. + +.. note:: + + The import command is only available in Ansible 2.0. + +Delete a Role +------------- + +Remove a role from the Galaxy web site using the delete command. You can delete any role that you have access to in GitHub. The delete command expects that the user previously authenticated with Galaxy using the login command. + +:: + + ansible-galaxy delete github_user github_repo + +This only removes the role from Galaxy. It does not impact the actual GitHub repo. + +.. note:: + + The delete command is only available in Ansible 2.0. + +Setup Travis Integrations +-------------------------- + +Using the setup command, you can enable notifications from `travis `_. The setup command expects that the user previously authenticated with Galaxy using the login command. + +:: + + $ ansible-galaxy setup travis github_user github_repo xxxtravistokenxxx + + Added integration for travis chouseknecht/ansible-role-sendmail + +The setup command requires your Travis token. The Travis token is not stored in Galaxy. It is used along with the GitHub username and repo to create a hash as described in `the Travis documentation `_. The calculated hash is stored in Galaxy and used to verify notifications received from Travis. + +The setup command enables Galaxy to respond to notifications. Follow the `Travis getting started guide `_ to enable the Travis build process for the role repository. + +When you create your .travis.yml file, add the following to cause Travis to notify Galaxy when a build completes: + +:: + + notifications: + webhooks: https://galaxy.ansible.com/api/v1/notifications/ + +.. note:: + + The setup command is only available in Ansible 2.0. + + +List Travis Integrations +======================== + +Use the --list option to display your Travis integrations: + +:: + + $ ansible-galaxy setup --list + + + ID Source Repo + ---------- ---------- ---------- + 2 travis github_user/github_repo + 1 travis github_user/github_repo + + +Remove Travis Integrations +========================== + +Use the --remove option to disable a Travis integration: + +:: + + $ ansible-galaxy setup --remove ID + +Provide the ID of the integration you want disabled. Use the --list option to get the ID.
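+ +As a minimal illustration (the Python version, install and script lines below are placeholders to adapt to your own role and CI needs, not a definitive setup), a .travis.yml that runs a syntax check against the test playbook created by the init command and then notifies Galaxy might look like:: + + language: python + python: "2.7" + install: + - pip install ansible + script: + - ansible-playbook -i tests/inventory tests/test.yml --syntax-check + notifications: + webhooks: https://galaxy.ansible.com/api/v1/notifications/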
+ + + + + + + + + + + + + + + + + + diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py index 94c04614ace..01e0475b24b 100644 --- a/lib/ansible/cli/galaxy.py +++ b/lib/ansible/cli/galaxy.py @@ -22,10 +22,11 @@ from __future__ import (absolute_import, division, print_function) __metaclass__ = type -import os import os.path import sys import yaml +import json +import time from collections import defaultdict from jinja2 import Environment @@ -36,7 +37,10 @@ from ansible.errors import AnsibleError, AnsibleOptionsError from ansible.galaxy import Galaxy from ansible.galaxy.api import GalaxyAPI from ansible.galaxy.role import GalaxyRole +from ansible.galaxy.login import GalaxyLogin +from ansible.galaxy.token import GalaxyToken from ansible.playbook.role.requirement import RoleRequirement +from ansible.module_utils.urls import open_url try: from __main__ import display @@ -44,18 +48,52 @@ except ImportError: from ansible.utils.display import Display display = Display() - class GalaxyCLI(CLI): - VALID_ACTIONS = ("init", "info", "install", "list", "remove", "search") - SKIP_INFO_KEYS = ("name", "description", "readme_html", "related", "summary_fields", "average_aw_composite", "average_aw_score", "url" ) + available_commands = { + "delete": "remove a role from Galaxy", + "import": "add a role contained in a GitHub repo to Galaxy", + "info": "display details about a particular role", + "init": "create a role directory structure in your roles path", + "install": "download a role into your roles path", + "list": "enumerate roles found in your roles path", + "login": "authenticate with Galaxy API and store the token", + "remove": "delete a role from your roles path", + "search": "query the Galaxy API", + "setup": "add a TravisCI integration to Galaxy", + } + SKIP_INFO_KEYS = ("name", "description", "readme_html", "related", "summary_fields", "average_aw_composite", "average_aw_score", "url" ) + def __init__(self, args): - + self.VALID_ACTIONS = self.available_commands.keys() + self.VALID_ACTIONS.sort() self.api = None self.galaxy = None super(GalaxyCLI, self).__init__(args) + def set_action(self): + """ + Get the action the user wants to execute from the sys argv list. + """ + for i in range(0,len(self.args)): + arg = self.args[i] + if arg in self.VALID_ACTIONS: + self.action = arg + del self.args[i] + break + + if not self.action: + self.show_available_actions() + + def show_available_actions(self): + # list available commands + display.display(u'\n' + "usage: ansible-galaxy COMMAND [--help] [options] ...") + display.display(u'\n' + "availabe commands:" + u'\n\n') + for key in self.VALID_ACTIONS: + display.display(u'\t' + "%-12s %s" % (key, self.available_commands[key])) + display.display(' ') + def parse(self): ''' create an options parser for bin/ansible ''' @@ -63,11 +101,21 @@ class GalaxyCLI(CLI): usage = "usage: %%prog [%s] [--help] [options] ..." 
% "|".join(self.VALID_ACTIONS), epilog = "\nSee '%s --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0]) ) - + self.set_action() # options specific to actions - if self.action == "info": + if self.action == "delete": + self.parser.set_usage("usage: %prog delete [options] github_user github_repo") + elif self.action == "import": + self.parser.set_usage("usage: %prog import [options] github_user github_repo") + self.parser.add_option('-n', '--no-wait', dest='wait', action='store_false', default=True, + help='Don\'t wait for import results.') + self.parser.add_option('-b', '--branch', dest='reference', + help='The name of a branch to import. Defaults to the repository\'s default branch (usually master)') + self.parser.add_option('-t', '--status', dest='check_status', action='store_true', default=False, + help='Check the status of the most recent import request for given github_user/github_repo.') + elif self.action == "info": self.parser.set_usage("usage: %prog info [options] role_name[,version]") elif self.action == "init": self.parser.set_usage("usage: %prog init [options] role_name") @@ -83,27 +131,40 @@ class GalaxyCLI(CLI): self.parser.add_option('-n', '--no-deps', dest='no_deps', action='store_true', default=False, help='Don\'t download roles listed as dependencies') self.parser.add_option('-r', '--role-file', dest='role_file', - help='A file containing a list of roles to be imported') + help='A file containing a list of roles to be imported') elif self.action == "remove": self.parser.set_usage("usage: %prog remove role1 role2 ...") elif self.action == "list": self.parser.set_usage("usage: %prog list [role_name]") + elif self.action == "login": + self.parser.set_usage("usage: %prog login [options]") + self.parser.add_option('-g','--github-token', dest='token', default=None, + help='Identify with github token rather than username and password.') elif self.action == "search": self.parser.add_option('--platforms', dest='platforms', help='list of OS platforms to filter by') self.parser.add_option('--galaxy-tags', dest='tags', help='list of galaxy tags to filter by') - self.parser.set_usage("usage: %prog search [] [--galaxy-tags ] [--platforms platform]") + self.parser.add_option('--author', dest='author', + help='GitHub username') + self.parser.set_usage("usage: %prog search [searchterm1 searchterm2] [--galaxy-tags galaxy_tag1,galaxy_tag2] [--platforms platform1,platform2] [--author username]") + elif self.action == "setup": + self.parser.set_usage("usage: %prog setup [options] source github_user github_repo secret" + + u'\n\n' + "Create an integration with travis.") + self.parser.add_option('-r', '--remove', dest='remove_id', default=None, + help='Remove the integration matching the provided ID value. Use --list to see ID values.') + self.parser.add_option('-l', '--list', dest="setup_list", action='store_true', default=False, + help='List all of your integrations.') # options that apply to more than one action - if self.action != "init": + if not self.action in ("config","import","init","login","setup"): self.parser.add_option('-p', '--roles-path', dest='roles_path', default=C.DEFAULT_ROLES_PATH, help='The path to the directory containing your roles. 
' 'The default is the roles_path configured in your ' 'ansible.cfg file (/etc/ansible/roles if not configured)') - if self.action in ("info","init","install","search"): - self.parser.add_option('-s', '--server', dest='api_server', default="https://galaxy.ansible.com", + if self.action in ("import","info","init","install","login","search","setup","delete"): + self.parser.add_option('-s', '--server', dest='api_server', default=C.GALAXY_SERVER, help='The API server destination') self.parser.add_option('-c', '--ignore-certs', action='store_false', dest='validate_certs', default=True, help='Ignore SSL certificate validation errors.') @@ -112,23 +173,25 @@ class GalaxyCLI(CLI): self.parser.add_option('-f', '--force', dest='force', action='store_true', default=False, help='Force overwriting an existing role') - # get options, args and galaxy object - self.options, self.args =self.parser.parse_args(self.args[1:]) - display.verbosity = self.options.verbosity - self.galaxy = Galaxy(self.options) + if self.action: + # get options, args and galaxy object + self.options, self.args =self.parser.parse_args() + display.verbosity = self.options.verbosity + self.galaxy = Galaxy(self.options) return True def run(self): + if not self.action: + return True + super(GalaxyCLI, self).run() # if not offline, get connect to galaxy api - if self.action in ("info","install", "search") or (self.action == 'init' and not self.options.offline): - api_server = self.options.api_server - self.api = GalaxyAPI(self.galaxy, api_server) - if not self.api: - raise AnsibleError("The API server (%s) is not responding, please try again later." % api_server) + if self.action in ("import","info","install","search","login","setup","delete") or \ + (self.action == 'init' and not self.options.offline): + self.api = GalaxyAPI(self.galaxy) self.execute() @@ -188,7 +251,7 @@ class GalaxyCLI(CLI): "however it will reset any main.yml files that may have\n" "been modified there already." 
% role_path) - # create the default README.md + # create default README.md if not os.path.exists(role_path): os.makedirs(role_path) readme_path = os.path.join(role_path, "README.md") @@ -196,9 +259,16 @@ class GalaxyCLI(CLI): f.write(self.galaxy.default_readme) f.close() + # create default .travis.yml + travis = Environment().from_string(self.galaxy.default_travis).render() + f = open(os.path.join(role_path, '.travis.yml'), 'w') + f.write(travis) + f.close() + for dir in GalaxyRole.ROLE_DIRS: dir_path = os.path.join(init_path, role_name, dir) main_yml_path = os.path.join(dir_path, 'main.yml') + # create the directory if it doesn't exist already if not os.path.exists(dir_path): os.makedirs(dir_path) @@ -234,6 +304,20 @@ class GalaxyCLI(CLI): f.write(rendered_meta) f.close() pass + elif dir == "tests": + # create tests/test.yml + inject = dict( + role_name = role_name + ) + playbook = Environment().from_string(self.galaxy.default_test).render(inject) + f = open(os.path.join(dir_path, 'test.yml'), 'w') + f.write(playbook) + f.close() + + # create tests/inventory + f = open(os.path.join(dir_path, 'inventory'), 'w') + f.write('localhost') + f.close() elif dir not in ('files','templates'): # just write a (mostly) empty YAML file for main.yml f = open(main_yml_path, 'w') @@ -325,7 +409,7 @@ class GalaxyCLI(CLI): for role in required_roles: role = RoleRequirement.role_yaml_parse(role) - display.debug('found role %s in yaml file' % str(role)) + display.vvv('found role %s in yaml file' % str(role)) if 'name' not in role and 'scm' not in role: raise AnsibleError("Must specify name or src for role") roles_left.append(GalaxyRole(self.galaxy, **role)) @@ -348,7 +432,7 @@ class GalaxyCLI(CLI): roles_left.append(GalaxyRole(self.galaxy, rname.strip())) for role in roles_left: - display.debug('Installing role %s ' % role.name) + display.vvv('Installing role %s ' % role.name) # query the galaxy API for the role data if role.install_info is not None and not force: @@ -458,21 +542,189 @@ class GalaxyCLI(CLI): return 0 def execute_search(self): - + page_size = 1000 search = None - if len(self.args) > 1: - raise AnsibleOptionsError("At most a single search term is allowed.") - elif len(self.args) == 1: - search = self.args.pop() - - response = self.api.search_roles(search, self.options.platforms, self.options.tags) - - if 'count' in response: - display.display("Found %d roles matching your search:\n" % response['count']) + + if len(self.args): + terms = [] + for i in range(len(self.args)): + terms.append(self.args.pop()) + search = '+'.join(terms) + + if not search and not self.options.platforms and not self.options.tags and not self.options.author: + raise AnsibleError("Invalid query. At least one search term, platform, galaxy tag or author must be provided.") + + response = self.api.search_roles(search, platforms=self.options.platforms, + tags=self.options.tags, author=self.options.author, page_size=page_size) + + if response['count'] == 0: + display.display("No roles match your search.", color="yellow") + return True data = '' - if 'results' in response: - for role in response['results']: - data += self._display_role_info(role) + if response['count'] > page_size: + data += ("Found %d roles matching your search. Showing first %s.\n" % (response['count'], page_size)) + else: + data += ("Found %d roles matching your search:\n" % response['count']) + + max_len = [] + for role in response['results']: + max_len.append(len(role['username'] + '.' 
+ role['name'])) + name_len = max(max_len) + format_str = " %%-%ds %%s\n" % name_len + data +='\n' + data += (format_str % ("Name", "Description")) + data += (format_str % ("----", "-----------")) + for role in response['results']: + data += (format_str % (role['username'] + '.' + role['name'],role['description'])) + self.pager(data) + + return True + + def execute_login(self): + """ + Verify user's identify via Github and retreive an auth token from Galaxy. + """ + # Authenticate with github and retrieve a token + if self.options.token is None: + login = GalaxyLogin(self.galaxy) + github_token = login.create_github_token() + else: + github_token = self.options.token + + galaxy_response = self.api.authenticate(github_token) + + if self.options.token is None: + # Remove the token we created + login.remove_github_token() + + # Store the Galaxy token + token = GalaxyToken() + token.set(galaxy_response['token']) + + display.display("Succesfully logged into Galaxy as %s" % galaxy_response['username']) + return 0 + + def execute_import(self): + """ + Import a role into Galaxy + """ + + colors = { + 'INFO': 'normal', + 'WARNING': 'yellow', + 'ERROR': 'red', + 'SUCCESS': 'green', + 'FAILED': 'red' + } + + if len(self.args) < 2: + raise AnsibleError("Expected a github_username and github_repository. Use --help.") + + github_repo = self.args.pop() + github_user = self.args.pop() + + if self.options.check_status: + task = self.api.get_import_task(github_user=github_user, github_repo=github_repo) + else: + # Submit an import request + task = self.api.create_import_task(github_user, github_repo, reference=self.options.reference) + + if len(task) > 1: + # found multiple roles associated with github_user/github_repo + display.display("WARNING: More than one Galaxy role associated with Github repo %s/%s." 
% (github_user,github_repo), + color='yellow') + display.display("The following Galaxy roles are being updated:" + u'\n', color='yellow') + for t in task: + display.display('%s.%s' % (t['summary_fields']['role']['namespace'],t['summary_fields']['role']['name']), color='yellow') + display.display(u'\n' + "To properly namespace this role, remove each of the above and re-import %s/%s from scratch" % (github_user,github_repo), + color='yellow') + return 0 + # found a single role as expected + display.display("Successfully submitted import request %d" % task[0]['id']) + if not self.options.wait: + display.display("Role name: %s" % task[0]['summary_fields']['role']['name']) + display.display("Repo: %s/%s" % (task[0]['github_user'],task[0]['github_repo'])) + + if self.options.check_status or self.options.wait: + # Get the status of the import + msg_list = [] + finished = False + while not finished: + task = self.api.get_import_task(task_id=task[0]['id']) + for msg in task[0]['summary_fields']['task_messages']: + if msg['id'] not in msg_list: + display.display(msg['message_text'], color=colors[msg['message_type']]) + msg_list.append(msg['id']) + if task[0]['state'] in ['SUCCESS', 'FAILED']: + finished = True + else: + time.sleep(10) + + return 0 + + def execute_setup(self): + """ + Setup an integration from Github or Travis + """ + + if self.options.setup_list: + # List existing integration secrets + secrets = self.api.list_secrets() + if len(secrets) == 0: + # None found + display.display("No integrations found.") + return 0 + display.display(u'\n' + "ID Source Repo", color="green") + display.display("---------- ---------- ----------", color="green") + for secret in secrets: + display.display("%-10s %-10s %s/%s" % (secret['id'], secret['source'], secret['github_user'], + secret['github_repo']),color="green") + return 0 + + if self.options.remove_id: + # Remove a secret + self.api.remove_secret(self.options.remove_id) + display.display("Secret removed. Integrations using this secret will not longer work.", color="green") + return 0 + + if len(self.args) < 4: + raise AnsibleError("Missing one or more arguments. Expecting: source github_user github_repo secret") + return 0 + + secret = self.args.pop() + github_repo = self.args.pop() + github_user = self.args.pop() + source = self.args.pop() + + resp = self.api.add_secret(source, github_user, github_repo, secret) + display.display("Added integration for %s %s/%s" % (resp['source'], resp['github_user'], resp['github_repo'])) + + return 0 + + def execute_delete(self): + """ + Delete a role from galaxy.ansible.com + """ + + if len(self.args) < 2: + raise AnsibleError("Missing one or more arguments. 
Expected: github_user github_repo") + + github_repo = self.args.pop() + github_user = self.args.pop() + resp = self.api.delete_role(github_user, github_repo) + + if len(resp['deleted_roles']) > 1: + display.display("Deleted the following roles:") + display.display("ID User Name") + display.display("------ --------------- ----------") + for role in resp['deleted_roles']: + display.display("%-8s %-15s %s" % (role.id,role.namespace,role.name)) + + display.display(resp['status']) + + return True + + diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py index 0f809db7297..ae10c5e9a42 100644 --- a/lib/ansible/constants.py +++ b/lib/ansible/constants.py @@ -255,7 +255,8 @@ ACCELERATE_MULTI_KEY = get_config(p, 'accelerate', 'accelerate_multi_k PARAMIKO_PTY = get_config(p, 'paramiko_connection', 'pty', 'ANSIBLE_PARAMIKO_PTY', True, boolean=True) # galaxy related -DEFAULT_GALAXY_URI = get_config(p, 'galaxy', 'server_uri', 'ANSIBLE_GALAXY_SERVER_URI', 'https://galaxy.ansible.com') +GALAXY_SERVER = get_config(p, 'galaxy', 'server', 'ANSIBLE_GALAXY_SERVER', 'https://galaxy.ansible.com') +GALAXY_IGNORE_CERTS = get_config(p, 'galaxy', 'ignore_certs', 'ANSIBLE_GALAXY_IGNORE', False, boolean=True) # this can be configured to blacklist SCMS but cannot add new ones unless the code is also updated GALAXY_SCMS = get_config(p, 'galaxy', 'scms', 'ANSIBLE_GALAXY_SCMS', 'git, hg', islist=True) diff --git a/lib/ansible/galaxy/__init__.py b/lib/ansible/galaxy/__init__.py index 00d8c25aecf..62823fced47 100644 --- a/lib/ansible/galaxy/__init__.py +++ b/lib/ansible/galaxy/__init__.py @@ -52,6 +52,8 @@ class Galaxy(object): #TODO: move to getter for lazy loading self.default_readme = self._str_from_data_file('readme') self.default_meta = self._str_from_data_file('metadata_template.j2') + self.default_test = self._str_from_data_file('test_playbook.j2') + self.default_travis = self._str_from_data_file('travis.j2') def add_role(self, role): self.roles[role.name] = role diff --git a/lib/ansible/galaxy/api.py b/lib/ansible/galaxy/api.py index 2918688406f..c1bf2c4ed50 100644 --- a/lib/ansible/galaxy/api.py +++ b/lib/ansible/galaxy/api.py @@ -25,11 +25,15 @@ from __future__ import (absolute_import, division, print_function) __metaclass__ = type import json +import urllib + from urllib2 import quote as urlquote, HTTPError from urlparse import urlparse +import ansible.constants as C from ansible.errors import AnsibleError from ansible.module_utils.urls import open_url +from ansible.galaxy.token import GalaxyToken try: from __main__ import display @@ -43,45 +47,113 @@ class GalaxyAPI(object): SUPPORTED_VERSIONS = ['v1'] - def __init__(self, galaxy, api_server): + def __init__(self, galaxy): self.galaxy = galaxy - - try: - urlparse(api_server, scheme='https') - except: - raise AnsibleError("Invalid server API url passed: %s" % api_server) - - server_version = self.get_server_api_version('%s/api/' % (api_server)) - if not server_version: - raise AnsibleError("Could not retrieve server API version: %s" % api_server) - + self.token = GalaxyToken() + self._api_server = C.GALAXY_SERVER + self._validate_certs = C.GALAXY_IGNORE_CERTS + + # set validate_certs + if galaxy.options.validate_certs == False: + self._validate_certs = False + display.vvv('Check for valid certs: %s' % self._validate_certs) + + # set the API server + if galaxy.options.api_server != C.GALAXY_SERVER: + self._api_server = galaxy.options.api_server + display.vvv("Connecting to galaxy_server: %s" % self._api_server) + + server_version = 
self.get_server_api_version() + if server_version in self.SUPPORTED_VERSIONS: - self.baseurl = '%s/api/%s' % (api_server, server_version) + self.baseurl = '%s/api/%s' % (self._api_server, server_version) self.version = server_version # for future use - display.vvvvv("Base API: %s" % self.baseurl) + display.vvv("Base API: %s" % self.baseurl) else: raise AnsibleError("Unsupported Galaxy server API version: %s" % server_version) - def get_server_api_version(self, api_server): + def __auth_header(self): + token = self.token.get() + if token is None: + raise AnsibleError("No access token. You must first use login to authenticate and obtain an access token.") + return {'Authorization': 'Token ' + token} + + def __call_galaxy(self, url, args=None, headers=None, method=None): + if args and not headers: + headers = self.__auth_header() + try: + display.vvv(url) + resp = open_url(url, data=args, validate_certs=self._validate_certs, headers=headers, method=method) + data = json.load(resp) + except HTTPError as e: + res = json.load(e) + raise AnsibleError(res['detail']) + return data + + @property + def api_server(self): + return self._api_server + + @property + def validate_certs(self): + return self._validate_certs + + def get_server_api_version(self): """ Fetches the Galaxy API current version to ensure the API server is up and reachable. """ - #TODO: fix galaxy server which returns current_version path (/api/v1) vs actual version (v1) - # also should set baseurl using supported_versions which has path - return 'v1' - try: - data = json.load(open_url(api_server, validate_certs=self.galaxy.options.validate_certs)) - return data.get("current_version", 'v1') - except Exception: - # TODO: report error - return None + url = '%s/api/' % self._api_server + data = json.load(open_url(url, validate_certs=self._validate_certs)) + return data['current_version'] + except Exception as e: + raise AnsibleError("The API server (%s) is not responding, please try again later." % url) + + def authenticate(self, github_token): + """ + Retrieve an authentication token + """ + url = '%s/tokens/' % self.baseurl + args = urllib.urlencode({"github_token": github_token}) + resp = open_url(url, data=args, validate_certs=self._validate_certs, method="POST") + data = json.load(resp) + return data + + def create_import_task(self, github_user, github_repo, reference=None): + """ + Post an import request + """ + url = '%s/imports/' % self.baseurl + args = urllib.urlencode({ + "github_user": github_user, + "github_repo": github_repo, + "github_reference": reference if reference else "" + }) + data = self.__call_galaxy(url, args=args) + if data.get('results', None): + return data['results'] + return data + def get_import_task(self, task_id=None, github_user=None, github_repo=None): + """ + Check the status of an import task. + """ + url = '%s/imports/' % self.baseurl + if not task_id is None: + url = "%s?id=%d" % (url,task_id) + elif not github_user is None and not github_repo is None: + url = "%s?github_user=%s&github_repo=%s" % (url,github_user,github_repo) + else: + raise AnsibleError("Expected task_id or github_user and github_repo") + + data = self.__call_galaxy(url) + return data['results'] + def lookup_role_by_name(self, role_name, notify=True): """ - Find a role by name + Find a role by name. """ role_name = urlquote(role_name) @@ -92,18 +164,12 @@ class GalaxyAPI(object): if notify: display.display("- downloading role '%s', owned by %s" % (role_name, user_name)) except: - raise AnsibleError("- invalid role name (%s). 
Specify role as format: username.rolename" % role_name) + raise AnsibleError("Invalid role name (%s). Specify role as format: username.rolename" % role_name) url = '%s/roles/?owner__username=%s&name=%s' % (self.baseurl, user_name, role_name) - display.vvvv("- %s" % (url)) - try: - data = json.load(open_url(url, validate_certs=self.galaxy.options.validate_certs)) - if len(data["results"]) != 0: - return data["results"][0] - except: - # TODO: report on connection/availability errors - pass - + data = self.__call_galaxy(url) + if len(data["results"]) != 0: + return data["results"][0] return None def fetch_role_related(self, related, role_id): @@ -114,13 +180,12 @@ class GalaxyAPI(object): try: url = '%s/roles/%d/%s/?page_size=50' % (self.baseurl, int(role_id), related) - data = json.load(open_url(url, validate_certs=self.galaxy.options.validate_certs)) + data = self.__call_galaxy(url) results = data['results'] done = (data.get('next', None) is None) while not done: url = '%s%s' % (self.baseurl, data['next']) - display.display(url) - data = json.load(open_url(url, validate_certs=self.galaxy.options.validate_certs)) + data = self.__call_galaxy(url) results += data['results'] done = (data.get('next', None) is None) return results @@ -131,10 +196,9 @@ class GalaxyAPI(object): """ Fetch the list of items specified. """ - try: url = '%s/%s/?page_size' % (self.baseurl, what) - data = json.load(open_url(url, validate_certs=self.galaxy.options.validate_certs)) + data = self.__call_galaxy(url) if "results" in data: results = data['results'] else: @@ -144,41 +208,64 @@ class GalaxyAPI(object): done = (data.get('next', None) is None) while not done: url = '%s%s' % (self.baseurl, data['next']) - display.display(url) - data = json.load(open_url(url, validate_certs=self.galaxy.options.validate_certs)) + data = self.__call_galaxy(url) results += data['results'] done = (data.get('next', None) is None) return results except Exception as error: raise AnsibleError("Failed to download the %s list: %s" % (what, str(error))) - def search_roles(self, search, platforms=None, tags=None): + def search_roles(self, search, **kwargs): - search_url = self.baseurl + '/roles/?page=1' + search_url = self.baseurl + '/search/roles/?' 
if search: - search_url += '&search=' + urlquote(search) + search_url += '&autocomplete=' + urlquote(search) + + tags = kwargs.get('tags',None) + platforms = kwargs.get('platforms', None) + page_size = kwargs.get('page_size', None) + author = kwargs.get('author', None) - if tags is None: - tags = [] - elif isinstance(tags, basestring): + if tags and isinstance(tags, basestring): tags = tags.split(',') + search_url += '&tags_autocomplete=' + '+'.join(tags) + + if platforms and isinstance(platforms, basestring): + platforms = platforms.split(',') + search_url += '&platforms_autocomplete=' + '+'.join(platforms) - for tag in tags: - search_url += '&chain__tags__name=' + urlquote(tag) + if page_size: + search_url += '&page_size=%s' % page_size - if platforms is None: - platforms = [] - elif isinstance(platforms, basestring): - platforms = platforms.split(',') + if author: + search_url += '&username_autocomplete=%s' % author + + data = self.__call_galaxy(search_url) + return data - for plat in platforms: - search_url += '&chain__platforms__name=' + urlquote(plat) + def add_secret(self, source, github_user, github_repo, secret): + url = "%s/notification_secrets/" % self.baseurl + args = urllib.urlencode({ + "source": source, + "github_user": github_user, + "github_repo": github_repo, + "secret": secret + }) + data = self.__call_galaxy(url, args=args) + return data - display.debug("Executing query: %s" % search_url) - try: - data = json.load(open_url(search_url, validate_certs=self.galaxy.options.validate_certs)) - except HTTPError as e: - raise AnsibleError("Unsuccessful request to server: %s" % str(e)) + def list_secrets(self): + url = "%s/notification_secrets" % self.baseurl + data = self.__call_galaxy(url, headers=self.__auth_header()) + return data + + def remove_secret(self, secret_id): + url = "%s/notification_secrets/%s/" % (self.baseurl, secret_id) + data = self.__call_galaxy(url, headers=self.__auth_header(), method='DELETE') + return data + def delete_role(self, github_user, github_repo): + url = "%s/removerole/?github_user=%s&github_repo=%s" % (self.baseurl,github_user,github_repo) + data = self.__call_galaxy(url, headers=self.__auth_header(), method='DELETE') return data diff --git a/lib/ansible/galaxy/data/metadata_template.j2 b/lib/ansible/galaxy/data/metadata_template.j2 index c618adb3d4b..1054c64bdfa 100644 --- a/lib/ansible/galaxy/data/metadata_template.j2 +++ b/lib/ansible/galaxy/data/metadata_template.j2 @@ -2,9 +2,11 @@ galaxy_info: author: {{ author }} description: {{description}} company: {{ company }} + # If the issue tracker for your role is not on github, uncomment the # next line and provide a value # issue_tracker_url: {{ issue_tracker_url }} + # Some suggested licenses: # - BSD (default) # - MIT @@ -13,7 +15,17 @@ galaxy_info: # - Apache # - CC-BY license: {{ license }} + min_ansible_version: {{ min_ansible_version }} + + # Optionally specify the branch Galaxy will use when accessing the GitHub + # repo for this role. During role install, if no tags are available, + # Galaxy will use this branch. During import Galaxy will access files on + # this branch. If travis integration is cofigured, only notification for this + # branch will be accepted. Otherwise, in all cases, the repo's default branch + # (usually master) will be used. + #github_branch: + # # Below are all platforms currently available. Just uncomment # the ones that apply to your role. 
If you don't see your @@ -28,6 +40,7 @@ galaxy_info: # - {{ version }} {%- endfor %} {%- endfor %} + galaxy_tags: [] # List tags for your role here, one per line. A tag is # a keyword that describes and categorizes the role. @@ -36,6 +49,7 @@ galaxy_info: # # NOTE: A tag is limited to a single word comprised of # alphanumeric characters. Maximum 20 tags per role. + dependencies: [] # List your role dependencies here, one per line. # Be sure to remove the '[]' above if you add dependencies diff --git a/lib/ansible/galaxy/data/test_playbook.j2 b/lib/ansible/galaxy/data/test_playbook.j2 new file mode 100644 index 00000000000..45824f60519 --- /dev/null +++ b/lib/ansible/galaxy/data/test_playbook.j2 @@ -0,0 +1,5 @@ +--- +- hosts: localhost + remote_user: root + roles: + - {{ role_name }} \ No newline at end of file diff --git a/lib/ansible/galaxy/data/travis.j2 b/lib/ansible/galaxy/data/travis.j2 new file mode 100644 index 00000000000..36bbf6208cf --- /dev/null +++ b/lib/ansible/galaxy/data/travis.j2 @@ -0,0 +1,29 @@ +--- +language: python +python: "2.7" + +# Use the new container infrastructure +sudo: false + +# Install ansible +addons: + apt: + packages: + - python-pip + +install: + # Install ansible + - pip install ansible + + # Check ansible version + - ansible --version + + # Create ansible.cfg with correct roles_path + - printf '[defaults]\nroles_path=../' >ansible.cfg + +script: + # Basic role syntax check + - ansible-playbook tests/test.yml -i tests/inventory --syntax-check + +notifications: + webhooks: https://galaxy.ansible.com/api/v1/notifications/ \ No newline at end of file diff --git a/lib/ansible/galaxy/login.py b/lib/ansible/galaxy/login.py new file mode 100644 index 00000000000..3edaed7bc70 --- /dev/null +++ b/lib/ansible/galaxy/login.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python + +######################################################################## +# +# (C) 2015, Chris Houseknecht +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see . 
+# +######################################################################## + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import getpass +import json +import urllib + +from urllib2 import quote as urlquote, HTTPError +from urlparse import urlparse + +from ansible.errors import AnsibleError, AnsibleOptionsError +from ansible.module_utils.urls import open_url +from ansible.utils.color import stringc + +try: + from __main__ import display +except ImportError: + from ansible.utils.display import Display + display = Display() + +class GalaxyLogin(object): + ''' Class to handle authenticating user with Galaxy API prior to performing CUD operations ''' + + GITHUB_AUTH = 'https://api.github.com/authorizations' + + def __init__(self, galaxy, github_token=None): + self.galaxy = galaxy + self.github_username = None + self.github_password = None + + if github_token == None: + self.get_credentials() + + def get_credentials(self): + display.display(u'\n\n' + "We need your " + stringc("Github login",'bright cyan') + + " to identify you.", screen_only=True) + display.display("This information will " + stringc("not be sent to Galaxy",'bright cyan') + + ", only to " + stringc("api.github.com.","yellow"), screen_only=True) + display.display("The password will not be displayed." + u'\n\n', screen_only=True) + display.display("Use " + stringc("--github-token",'yellow') + + " if you do not want to enter your password." + u'\n\n', screen_only=True) + + try: + self.github_username = raw_input("Github Username: ") + except: + pass + + try: + self.github_password = getpass.getpass("Password for %s: " % self.github_username) + except: + pass + + if not self.github_username or not self.github_password: + raise AnsibleError("Invalid Github credentials. Username and password are required.") + + def remove_github_token(self): + ''' + If for some reason an ansible-galaxy token was left from a prior login, remove it. We cannot + retrieve the token after creation, so we are forced to create a new one. 
+ ''' + try: + tokens = json.load(open_url(self.GITHUB_AUTH, url_username=self.github_username, + url_password=self.github_password, force_basic_auth=True,)) + except HTTPError as e: + res = json.load(e) + raise AnsibleError(res['message']) + + for token in tokens: + if token['note'] == 'ansible-galaxy login': + display.vvvvv('removing token: %s' % token['token_last_eight']) + try: + open_url('https://api.github.com/authorizations/%d' % token['id'], url_username=self.github_username, + url_password=self.github_password, method='DELETE', force_basic_auth=True,) + except HTTPError as e: + res = json.load(e) + raise AnsibleError(res['message']) + + def create_github_token(self): + ''' + Create a personal authorization token with a note of 'ansible-galaxy login' + ''' + self.remove_github_token() + args = json.dumps({"scopes":["public_repo"], "note":"ansible-galaxy login"}) + try: + data = json.load(open_url(self.GITHUB_AUTH, url_username=self.github_username, + url_password=self.github_password, force_basic_auth=True, data=args)) + except HTTPError as e: + res = json.load(e) + raise AnsibleError(res['message']) + return data['token'] diff --git a/lib/ansible/galaxy/role.py b/lib/ansible/galaxy/role.py index dc9da5d79ce..36b1e0fbbba 100644 --- a/lib/ansible/galaxy/role.py +++ b/lib/ansible/galaxy/role.py @@ -46,7 +46,7 @@ class GalaxyRole(object): SUPPORTED_SCMS = set(['git', 'hg']) META_MAIN = os.path.join('meta', 'main.yml') META_INSTALL = os.path.join('meta', '.galaxy_install_info') - ROLE_DIRS = ('defaults','files','handlers','meta','tasks','templates','vars') + ROLE_DIRS = ('defaults','files','handlers','meta','tasks','templates','vars','tests') def __init__(self, galaxy, name, src=None, version=None, scm=None, path=None): @@ -198,10 +198,10 @@ class GalaxyRole(object): role_data = self.src tmp_file = self.fetch(role_data) else: - api = GalaxyAPI(self.galaxy, self.options.api_server) + api = GalaxyAPI(self.galaxy) role_data = api.lookup_role_by_name(self.src) if not role_data: - raise AnsibleError("- sorry, %s was not found on %s." % (self.src, self.options.api_server)) + raise AnsibleError("- sorry, %s was not found on %s." % (self.src, api.api_server)) role_versions = api.fetch_role_related('versions', role_data['id']) if not self.version: @@ -213,8 +213,10 @@ class GalaxyRole(object): loose_versions = [LooseVersion(a.get('name',None)) for a in role_versions] loose_versions.sort() self.version = str(loose_versions[-1]) + elif role_data.get('github_branch', None): + self.version = role_data['github_branch'] else: - self.version = 'master' + self.version = 'master' elif self.version != 'master': if role_versions and self.version not in [a.get('name', None) for a in role_versions]: raise AnsibleError("- the specified version (%s) of %s was not found in the list of available versions (%s)." % (self.version, self.name, role_versions)) diff --git a/lib/ansible/galaxy/token.py b/lib/ansible/galaxy/token.py new file mode 100644 index 00000000000..02ca8330697 --- /dev/null +++ b/lib/ansible/galaxy/token.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python + +######################################################################## +# +# (C) 2015, Chris Houseknecht +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see . +# +######################################################################## +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import yaml +from stat import * + +try: + from __main__ import display +except ImportError: + from ansible.utils.display import Display + display = Display() + + +class GalaxyToken(object): + ''' Class to storing and retrieving token in ~/.ansible_galaxy ''' + + def __init__(self): + self.file = os.path.expanduser("~") + '/.ansible_galaxy' + self.config = yaml.safe_load(self.__open_config_for_read()) + if not self.config: + self.config = {} + + def __open_config_for_read(self): + if os.path.isfile(self.file): + display.vvv('Opened %s' % self.file) + return open(self.file, 'r') + # config.yml not found, create and chomd u+rw + f = open(self.file,'w') + f.close() + os.chmod(self.file,S_IRUSR|S_IWUSR) # owner has +rw + display.vvv('Created %s' % self.file) + return open(self.file, 'r') + + def set(self, token): + self.config['token'] = token + self.save() + + def get(self): + return self.config.get('token', None) + + def save(self): + with open(self.file,'w') as f: + yaml.safe_dump(self.config,f,default_flow_style=False) + \ No newline at end of file From 04fc3f118f5989df4c2ba462d86a75d0b72fc50a Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Wed, 9 Dec 2015 08:23:45 -0800 Subject: [PATCH 0065/1113] Code smell test for specifying both required and default in FieldAttributes --- .travis.yml | 1 + test/code-smell/required-and-default-attributes.sh | 10 ++++++++++ 2 files changed, 11 insertions(+) create mode 100755 test/code-smell/required-and-default-attributes.sh diff --git a/.travis.yml b/.travis.yml index 1ff0ca118d4..603132f722c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -24,6 +24,7 @@ script: - ./test/code-smell/replace-urlopen.sh . - ./test/code-smell/use-compat-six.sh lib - ./test/code-smell/boilerplate.sh +- ./test/code-smell/required-and-default-attributes.sh - if test x"$TOXENV" != x'py24' ; then tox ; fi - if test x"$TOXENV" = x'py24' ; then python2.4 -V && python2.4 -m compileall -fq -x 'module_utils/(a10|rax|openstack|ec2|gce).py' lib/ansible/module_utils ; fi #- make -C docsite all diff --git a/test/code-smell/required-and-default-attributes.sh b/test/code-smell/required-and-default-attributes.sh new file mode 100755 index 00000000000..9822a155973 --- /dev/null +++ b/test/code-smell/required-and-default-attributes.sh @@ -0,0 +1,10 @@ +#!/bin/sh + +BASEDIR=${1-"lib/ansible"} +cd "$BASEDIR" +grep -r FieldAttribute . |grep 'default' | grep 'required' +if test $? -eq 0 ; then + exit 1 +fi +exit 0 + From c64298de02a9998d6c5774ccb1f92a9aec435d74 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Wed, 9 Dec 2015 08:22:58 -0800 Subject: [PATCH 0066/1113] Revert "avoid persistent containers in attribute defaults" This reverts commit 87969868d42cd8aba1c65c8207d059d73407373b. 
found better way to do it --- lib/ansible/playbook/attribute.py | 11 ----------- lib/ansible/playbook/block.py | 6 +++--- lib/ansible/playbook/conditional.py | 2 +- lib/ansible/playbook/play.py | 16 ++++++++-------- lib/ansible/playbook/play_context.py | 4 ++-- lib/ansible/playbook/playbook_include.py | 2 +- lib/ansible/playbook/role/metadata.py | 2 +- lib/ansible/playbook/taggable.py | 2 +- lib/ansible/playbook/task.py | 2 +- 9 files changed, 18 insertions(+), 29 deletions(-) diff --git a/lib/ansible/playbook/attribute.py b/lib/ansible/playbook/attribute.py index ce7ed6d8fe7..703d9dbca1e 100644 --- a/lib/ansible/playbook/attribute.py +++ b/lib/ansible/playbook/attribute.py @@ -32,17 +32,6 @@ class Attribute: self.priority = priority self.always_post_validate = always_post_validate - # This is here to avoid `default=` unwanted persistence across object instances - # We cannot rely on None as some fields use it to skip the code - # that would detect an empty container as a user error - if self.default == '_ansible_container': - if self.isa == 'list': - self.default = [] - elif self.isa == 'dict': - self.default = {} - elif self.isa == 'set': - self.default = set() - def __eq__(self, other): return other.priority == self.priority diff --git a/lib/ansible/playbook/block.py b/lib/ansible/playbook/block.py index 66009b028af..f2d9c82833a 100644 --- a/lib/ansible/playbook/block.py +++ b/lib/ansible/playbook/block.py @@ -30,9 +30,9 @@ from ansible.playbook.taggable import Taggable class Block(Base, Become, Conditional, Taggable): - _block = FieldAttribute(isa='list', default='_ansible_container') - _rescue = FieldAttribute(isa='list', default='_ansible_container') - _always = FieldAttribute(isa='list', default='_ansible_container') + _block = FieldAttribute(isa='list', default=[]) + _rescue = FieldAttribute(isa='list', default=[]) + _always = FieldAttribute(isa='list', default=[]) _delegate_to = FieldAttribute(isa='list') _delegate_facts = FieldAttribute(isa='bool', default=False) diff --git a/lib/ansible/playbook/conditional.py b/lib/ansible/playbook/conditional.py index a5b3ca725f8..fc178e2fa1d 100644 --- a/lib/ansible/playbook/conditional.py +++ b/lib/ansible/playbook/conditional.py @@ -33,7 +33,7 @@ class Conditional: to be run conditionally when a condition is met or skipped. 
''' - _when = FieldAttribute(isa='list', default='_ansible_container') + _when = FieldAttribute(isa='list', default=[]) def __init__(self, loader=None): # when used directly, this class needs a loader, but we want to diff --git a/lib/ansible/playbook/play.py b/lib/ansible/playbook/play.py index e08c8c60016..ed61416e951 100644 --- a/lib/ansible/playbook/play.py +++ b/lib/ansible/playbook/play.py @@ -64,22 +64,22 @@ class Play(Base, Taggable, Become): # Connection _gather_facts = FieldAttribute(isa='bool', default=None, always_post_validate=True) - _hosts = FieldAttribute(isa='list', default='_ansible_container', required=True, listof=string_types, always_post_validate=True) + _hosts = FieldAttribute(isa='list', default=[], required=True, listof=string_types, always_post_validate=True) _name = FieldAttribute(isa='string', default='', always_post_validate=True) # Variable Attributes - _vars_files = FieldAttribute(isa='list', default='_ansible_container', priority=99) - _vars_prompt = FieldAttribute(isa='list', default='_ansible_container', always_post_validate=True) + _vars_files = FieldAttribute(isa='list', default=[], priority=99) + _vars_prompt = FieldAttribute(isa='list', default=[], always_post_validate=True) _vault_password = FieldAttribute(isa='string', always_post_validate=True) # Role Attributes - _roles = FieldAttribute(isa='list', default='_ansible_container', priority=90) + _roles = FieldAttribute(isa='list', default=[], priority=90) # Block (Task) Lists Attributes - _handlers = FieldAttribute(isa='list', default='_ansible_container') - _pre_tasks = FieldAttribute(isa='list', default='_ansible_container') - _post_tasks = FieldAttribute(isa='list', default='_ansible_container') - _tasks = FieldAttribute(isa='list', default='_ansible_container') + _handlers = FieldAttribute(isa='list', default=[]) + _pre_tasks = FieldAttribute(isa='list', default=[]) + _post_tasks = FieldAttribute(isa='list', default=[]) + _tasks = FieldAttribute(isa='list', default=[]) # Flag/Setting Attributes _any_errors_fatal = FieldAttribute(isa='bool', default=False, always_post_validate=True) diff --git a/lib/ansible/playbook/play_context.py b/lib/ansible/playbook/play_context.py index da291c3c834..81223500adf 100644 --- a/lib/ansible/playbook/play_context.py +++ b/lib/ansible/playbook/play_context.py @@ -171,8 +171,8 @@ class PlayContext(Base): # general flags _verbosity = FieldAttribute(isa='int', default=0) - _only_tags = FieldAttribute(isa='set', default='_ansible_container') - _skip_tags = FieldAttribute(isa='set', default='_ansible_container') + _only_tags = FieldAttribute(isa='set', default=set()) + _skip_tags = FieldAttribute(isa='set', default=set()) _check_mode = FieldAttribute(isa='bool', default=False) _force_handlers = FieldAttribute(isa='bool', default=False) _start_at_task = FieldAttribute(isa='string') diff --git a/lib/ansible/playbook/playbook_include.py b/lib/ansible/playbook/playbook_include.py index 52081c41539..d9af2ba5237 100644 --- a/lib/ansible/playbook/playbook_include.py +++ b/lib/ansible/playbook/playbook_include.py @@ -35,7 +35,7 @@ class PlaybookInclude(Base, Conditional, Taggable): _name = FieldAttribute(isa='string') _include = FieldAttribute(isa='string') - _vars = FieldAttribute(isa='dict', default='_ansible_container') + _vars = FieldAttribute(isa='dict', default=dict()) @staticmethod def load(data, basedir, variable_manager=None, loader=None): diff --git a/lib/ansible/playbook/role/metadata.py b/lib/ansible/playbook/role/metadata.py index 4bb7d0ce02b..58b59145a1c 100644 --- 
a/lib/ansible/playbook/role/metadata.py +++ b/lib/ansible/playbook/role/metadata.py @@ -40,7 +40,7 @@ class RoleMetadata(Base): ''' _allow_duplicates = FieldAttribute(isa='bool', default=False) - _dependencies = FieldAttribute(isa='list', default='_ansible_container') + _dependencies = FieldAttribute(isa='list', default=[]) _galaxy_info = FieldAttribute(isa='GalaxyInfo') def __init__(self, owner=None): diff --git a/lib/ansible/playbook/taggable.py b/lib/ansible/playbook/taggable.py index 37e3261e80d..8f5cfa09344 100644 --- a/lib/ansible/playbook/taggable.py +++ b/lib/ansible/playbook/taggable.py @@ -29,7 +29,7 @@ from ansible.template import Templar class Taggable: untagged = frozenset(['untagged']) - _tags = FieldAttribute(isa='list', default='_ansible_container', listof=(string_types,int)) + _tags = FieldAttribute(isa='list', default=[], listof=(string_types,int)) def __init__(self): super(Taggable, self).__init__() diff --git a/lib/ansible/playbook/task.py b/lib/ansible/playbook/task.py index 53a9a3c3931..17f1952e39c 100644 --- a/lib/ansible/playbook/task.py +++ b/lib/ansible/playbook/task.py @@ -64,7 +64,7 @@ class Task(Base, Conditional, Taggable, Become): # will be used if defined # might be possible to define others - _args = FieldAttribute(isa='dict', default='_ansible_container') + _args = FieldAttribute(isa='dict', default=dict()) _action = FieldAttribute(isa='string') _any_errors_fatal = FieldAttribute(isa='bool') From 2820b4c243d50416f661c4ea9408bba1918244bb Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Wed, 9 Dec 2015 08:23:45 -0800 Subject: [PATCH 0067/1113] removed default from hosts to make it requried prevents writing a play w/o a hosts entry which would default to all/empty --- lib/ansible/playbook/play.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/playbook/play.py b/lib/ansible/playbook/play.py index ed61416e951..bc033148646 100644 --- a/lib/ansible/playbook/play.py +++ b/lib/ansible/playbook/play.py @@ -64,7 +64,7 @@ class Play(Base, Taggable, Become): # Connection _gather_facts = FieldAttribute(isa='bool', default=None, always_post_validate=True) - _hosts = FieldAttribute(isa='list', default=[], required=True, listof=string_types, always_post_validate=True) + _hosts = FieldAttribute(isa='list', required=True, listof=string_types, always_post_validate=True) _name = FieldAttribute(isa='string', default='', always_post_validate=True) # Variable Attributes From 2bfb13bfb39bf31c5c1bc40f376907fc50ca69ef Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Wed, 9 Dec 2015 08:28:54 -0800 Subject: [PATCH 0068/1113] removed unused 'pattern' from ansible.cfg also moved the config param to a 'deprecated' list in constants.py added TODO for producing a deprecation warning for such vars --- examples/ansible.cfg | 1 - lib/ansible/constants.py | 8 ++++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/examples/ansible.cfg b/examples/ansible.cfg index 87c089f45ae..ec3ddf20641 100644 --- a/examples/ansible.cfg +++ b/examples/ansible.cfg @@ -14,7 +14,6 @@ #inventory = /etc/ansible/hosts #library = /usr/share/my_modules/ #remote_tmp = $HOME/.ansible/tmp -#pattern = * #forks = 5 #poll_interval = 15 #sudo_user = root diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py index ae10c5e9a42..7f74358dd5d 100644 --- a/lib/ansible/constants.py +++ b/lib/ansible/constants.py @@ -120,16 +120,20 @@ DEFAULT_COW_WHITELIST = ['bud-frogs', 'bunny', 'cheese', 'daemon', 'default', 'd # sections in config file DEFAULTS='defaults' +# FIXME: add 
deprecation warning when these get set +#### DEPRECATED VARS #### +# use more sanely named 'inventory' DEPRECATED_HOST_LIST = get_config(p, DEFAULTS, 'hostfile', 'ANSIBLE_HOSTS', '/etc/ansible/hosts', ispath=True) +# this is not used since 0.5 but people might still have in config +DEFAULT_PATTERN = get_config(p, DEFAULTS, 'pattern', None, None) -# generally configurable things +#### GENERALLY CONFIGURABLE THINGS #### DEFAULT_DEBUG = get_config(p, DEFAULTS, 'debug', 'ANSIBLE_DEBUG', False, boolean=True) DEFAULT_HOST_LIST = get_config(p, DEFAULTS,'inventory', 'ANSIBLE_INVENTORY', DEPRECATED_HOST_LIST, ispath=True) DEFAULT_MODULE_PATH = get_config(p, DEFAULTS, 'library', 'ANSIBLE_LIBRARY', None, ispath=True) DEFAULT_ROLES_PATH = get_config(p, DEFAULTS, 'roles_path', 'ANSIBLE_ROLES_PATH', '/etc/ansible/roles', ispath=True) DEFAULT_REMOTE_TMP = get_config(p, DEFAULTS, 'remote_tmp', 'ANSIBLE_REMOTE_TEMP', '$HOME/.ansible/tmp') DEFAULT_MODULE_NAME = get_config(p, DEFAULTS, 'module_name', None, 'command') -DEFAULT_PATTERN = get_config(p, DEFAULTS, 'pattern', None, '*') DEFAULT_FORKS = get_config(p, DEFAULTS, 'forks', 'ANSIBLE_FORKS', 5, integer=True) DEFAULT_MODULE_ARGS = get_config(p, DEFAULTS, 'module_args', 'ANSIBLE_MODULE_ARGS', '') DEFAULT_MODULE_LANG = get_config(p, DEFAULTS, 'module_lang', 'ANSIBLE_MODULE_LANG', os.getenv('LANG', 'en_US.UTF-8')) From ae2447df9136353453c9ed48d44b2c7fa70231b0 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Wed, 9 Dec 2015 08:38:53 -0800 Subject: [PATCH 0069/1113] attribute defaults that are containers are a copy This is simpler way to prevent persistent containers across instances of classes that use field attributes --- lib/ansible/playbook/attribute.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/lib/ansible/playbook/attribute.py b/lib/ansible/playbook/attribute.py index 703d9dbca1e..0befb9d80df 100644 --- a/lib/ansible/playbook/attribute.py +++ b/lib/ansible/playbook/attribute.py @@ -19,6 +19,7 @@ from __future__ import (absolute_import, division, print_function) __metaclass__ = type +from copy import deepcopy class Attribute: @@ -32,6 +33,11 @@ class Attribute: self.priority = priority self.always_post_validate = always_post_validate + if default is not None and self.isa in ('list', 'dict', 'set'): + self.default = deepcopy(default) + else: + self.default = default + def __eq__(self, other): return other.priority == self.priority From 0211da2fe9a7b3cefa79d72aab599546bf923e1b Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Wed, 9 Dec 2015 08:44:09 -0800 Subject: [PATCH 0070/1113] Clarify language of delegate_facts documentation --- docsite/rst/playbooks_delegation.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docsite/rst/playbooks_delegation.rst b/docsite/rst/playbooks_delegation.rst index 4e2e8c372ac..c715adea361 100644 --- a/docsite/rst/playbooks_delegation.rst +++ b/docsite/rst/playbooks_delegation.rst @@ -137,8 +137,8 @@ Delegated facts .. versionadded:: 2.0 -Before 2.0 any facts gathered by a delegated task were assigned to the `inventory_hostname` (current host) instead of the host which actually produced the facts (delegated to host). -The new directive `delegate_facts` if set to `True` will assing the task's gathered facts to the delegated host instead of the current one.:: +By default, any fact gathered by a delegated task are assigned to the `inventory_hostname` (the current host) instead of the host which actually produced the facts (the delegated to host). 
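
The FieldAttribute changes in the commits above all guard against the same Python pitfall: a container passed as a class-level default is created once and then shared, so a mutation made through one object silently leaks into every other object that uses the attribute. A minimal stand-alone sketch of that behaviour and of the copy-based fix, using a hypothetical ``Attr`` class rather than the real ``Attribute``::

    from copy import deepcopy

    class Attr(object):
        # hypothetical stand-in for the real Attribute/FieldAttribute class
        def __init__(self, isa, default=None):
            self.isa = isa
            if default is not None and isa in ('list', 'dict', 'set'):
                self.default = deepcopy(default)   # each attribute gets its own container
            else:
                self.default = default

    shared = []
    a = Attr('list', default=shared)
    b = Attr('list', default=shared)
    a.default.append('leaked value')
    print(b.default)   # [] with the copy; would be ['leaked value'] without it
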
+In 2.0, the directive `delegate_facts` may be set to `True` to assign the task's gathered facts to the delegated host instead of the current one.:: - hosts: app_servers @@ -149,8 +149,8 @@ The new directive `delegate_facts` if set to `True` will assing the task's gathe delegate_facts: True with_items: "{{groups['dbservers'}}" -The above will gather facts for the machines in the dbservers group and assign the facts to those machines and not to app_servers, -that way you can lookup `hostvars['dbhost1']['default_ipv4_addresses'][0]` even though dbservers were not part of the play, or left out by using `--limit`. +The above will gather facts for the machines in the dbservers group and assign the facts to those machines and not to app_servers. +This way you can lookup `hostvars['dbhost1']['default_ipv4_addresses'][0]` even though dbservers were not part of the play, or left out by using `--limit`. .. _run_once: From 7936a4687e9be3752bdbee006d956ed4f2687160 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Wed, 9 Dec 2015 10:01:21 -0800 Subject: [PATCH 0071/1113] adhoc avoids callbacks by default as it did before Previous emptying of whitelist only affected callbacks that were constructed for need whitelist. This now works for all callbacks. --- lib/ansible/cli/adhoc.py | 4 +--- lib/ansible/executor/task_queue_manager.py | 5 +++-- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/lib/ansible/cli/adhoc.py b/lib/ansible/cli/adhoc.py index 912b07a5c72..f6dcb37a8ab 100644 --- a/lib/ansible/cli/adhoc.py +++ b/lib/ansible/cli/adhoc.py @@ -163,9 +163,6 @@ class AdHocCLI(CLI): else: cb = 'minimal' - if not C.DEFAULT_LOAD_CALLBACK_PLUGINS: - C.DEFAULT_CALLBACK_WHITELIST = [] - if self.options.tree: C.DEFAULT_CALLBACK_WHITELIST.append('tree') C.TREE_DIR = self.options.tree @@ -180,6 +177,7 @@ class AdHocCLI(CLI): options=self.options, passwords=passwords, stdout_callback=cb, + run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS, ) result = self._tqm.run(play) finally: diff --git a/lib/ansible/executor/task_queue_manager.py b/lib/ansible/executor/task_queue_manager.py index d665000046c..70cefee510b 100644 --- a/lib/ansible/executor/task_queue_manager.py +++ b/lib/ansible/executor/task_queue_manager.py @@ -56,7 +56,7 @@ class TaskQueueManager: which dispatches the Play's tasks to hosts. 
''' - def __init__(self, inventory, variable_manager, loader, options, passwords, stdout_callback=None): + def __init__(self, inventory, variable_manager, loader, options, passwords, stdout_callback=None, run_additional_callbacks=True): self._inventory = inventory self._variable_manager = variable_manager @@ -65,6 +65,7 @@ class TaskQueueManager: self._stats = AggregateStats() self.passwords = passwords self._stdout_callback = stdout_callback + self._run_additional_callbacks = run_additional_callbacks self._callbacks_loaded = False self._callback_plugins = [] @@ -159,7 +160,7 @@ class TaskQueueManager: if callback_name != self._stdout_callback or stdout_callback_loaded: continue stdout_callback_loaded = True - elif callback_needs_whitelist and (C.DEFAULT_CALLBACK_WHITELIST is None or callback_name not in C.DEFAULT_CALLBACK_WHITELIST): + elif not self._run_additional_callbacks or (callback_needs_whitelist and (C.DEFAULT_CALLBACK_WHITELIST is None or callback_name not in C.DEFAULT_CALLBACK_WHITELIST)): continue self._callback_plugins.append(callback_plugin()) From 04d74fd6804b5a851cc8762cecf07b100e4dcc6f Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Wed, 9 Dec 2015 10:13:50 -0800 Subject: [PATCH 0072/1113] reenabled --tree for ansible adhoc command previous fix to avoid callbacks now conflicted with tree optoin which is implemented as a callback in 2.0 --- lib/ansible/cli/adhoc.py | 3 +++ lib/ansible/executor/task_queue_manager.py | 5 ++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/lib/ansible/cli/adhoc.py b/lib/ansible/cli/adhoc.py index f6dcb37a8ab..3de0e55b7bb 100644 --- a/lib/ansible/cli/adhoc.py +++ b/lib/ansible/cli/adhoc.py @@ -163,9 +163,11 @@ class AdHocCLI(CLI): else: cb = 'minimal' + run_tree=False if self.options.tree: C.DEFAULT_CALLBACK_WHITELIST.append('tree') C.TREE_DIR = self.options.tree + run_tree=True # now create a task queue manager to execute the play self._tqm = None @@ -178,6 +180,7 @@ class AdHocCLI(CLI): passwords=passwords, stdout_callback=cb, run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS, + run_tree=run_tree, ) result = self._tqm.run(play) finally: diff --git a/lib/ansible/executor/task_queue_manager.py b/lib/ansible/executor/task_queue_manager.py index 70cefee510b..74111382935 100644 --- a/lib/ansible/executor/task_queue_manager.py +++ b/lib/ansible/executor/task_queue_manager.py @@ -56,7 +56,7 @@ class TaskQueueManager: which dispatches the Play's tasks to hosts. 
''' - def __init__(self, inventory, variable_manager, loader, options, passwords, stdout_callback=None, run_additional_callbacks=True): + def __init__(self, inventory, variable_manager, loader, options, passwords, stdout_callback=None, run_additional_callbacks=True, run_tree=False): self._inventory = inventory self._variable_manager = variable_manager @@ -66,6 +66,7 @@ class TaskQueueManager: self.passwords = passwords self._stdout_callback = stdout_callback self._run_additional_callbacks = run_additional_callbacks + self._run_tree = run_tree self._callbacks_loaded = False self._callback_plugins = [] @@ -160,6 +161,8 @@ class TaskQueueManager: if callback_name != self._stdout_callback or stdout_callback_loaded: continue stdout_callback_loaded = True + elif callback_name == 'tree' and self._run_tree: + pass elif not self._run_additional_callbacks or (callback_needs_whitelist and (C.DEFAULT_CALLBACK_WHITELIST is None or callback_name not in C.DEFAULT_CALLBACK_WHITELIST)): continue From 14e19c239d610619498f06978e2841764a262e15 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Wed, 9 Dec 2015 14:51:43 -0500 Subject: [PATCH 0073/1113] Make on_file_diff callback item-aware --- lib/ansible/plugins/callback/__init__.py | 6 +++++- lib/ansible/plugins/callback/default.py | 9 ++++++++- lib/ansible/plugins/callback/skippy.py | 9 ++++++++- lib/ansible/plugins/strategy/__init__.py | 2 +- 4 files changed, 22 insertions(+), 4 deletions(-) diff --git a/lib/ansible/plugins/callback/__init__.py b/lib/ansible/plugins/callback/__init__.py index 03eb58d99db..b8a48943f28 100644 --- a/lib/ansible/plugins/callback/__init__.py +++ b/lib/ansible/plugins/callback/__init__.py @@ -59,6 +59,10 @@ class CallbackBase: version = getattr(self, 'CALLBACK_VERSION', '1.0') self._display.vvvv('Loaded callback %s of type %s, v%s' % (name, ctype, version)) + def _copy_result(self, result): + ''' helper for callbacks, so they don't all have to include deepcopy ''' + return deepcopy(result) + def _dump_results(self, result, indent=None, sort_keys=True, keep_invocation=False): if result.get('_ansible_no_log', False): return json.dumps(dict(censored="the output has been hidden due to the fact that 'no_log: true' was specified for this result")) @@ -126,7 +130,7 @@ class CallbackBase: def _process_items(self, result): for res in result._result['results']: - newres = deepcopy(result) + newres = self._copy_result(result) res['item'] = self._get_item(res) newres._result = res if 'failed' in res and res['failed']: diff --git a/lib/ansible/plugins/callback/default.py b/lib/ansible/plugins/callback/default.py index 3175bf3e53c..1f37f4b975e 100644 --- a/lib/ansible/plugins/callback/default.py +++ b/lib/ansible/plugins/callback/default.py @@ -134,7 +134,14 @@ class CallbackModule(CallbackBase): self._display.banner(msg) def v2_on_file_diff(self, result): - if 'diff' in result._result and result._result['diff']: + if result._task.loop and 'results' in result._result: + for res in result._result['results']: + newres = self._copy_result(result) + res['item'] = self._get_item(res) + newres._result = res + + self.v2_on_file_diff(newres) + elif 'diff' in result._result and result._result['diff']: self._display.display(self._get_diff(result._result['diff'])) def v2_playbook_item_on_ok(self, result): diff --git a/lib/ansible/plugins/callback/skippy.py b/lib/ansible/plugins/callback/skippy.py index 15b7d3387c2..495943417fd 100644 --- a/lib/ansible/plugins/callback/skippy.py +++ b/lib/ansible/plugins/callback/skippy.py @@ -123,7 +123,14 @@ class 
CallbackModule(CallbackBase): self._display.banner(msg) def v2_on_file_diff(self, result): - if 'diff' in result._result and result._result['diff']: + if result._task.loop and 'results' in result._result: + for res in result._result['results']: + newres = self._copy_result(result) + res['item'] = self._get_item(res) + newres._result = res + + self.v2_on_file_diff(newres) + elif 'diff' in result._result and result._result['diff']: self._display.display(self._get_diff(result._result['diff'])) def v2_playbook_item_on_ok(self, result): diff --git a/lib/ansible/plugins/strategy/__init__.py b/lib/ansible/plugins/strategy/__init__.py index 732a9293d28..15636b580d1 100644 --- a/lib/ansible/plugins/strategy/__init__.py +++ b/lib/ansible/plugins/strategy/__init__.py @@ -221,7 +221,7 @@ class StrategyBase: self._tqm._stats.increment('changed', host.name) self._tqm.send_callback('v2_runner_on_ok', task_result) - if self._diff and 'diff' in task_result._result: + if self._diff: self._tqm.send_callback('v2_on_file_diff', task_result) self._pending_results -= 1 From 61dc4a7e67bcb7c968e273ee39618d1f76f7ab9e Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Wed, 9 Dec 2015 12:10:21 -0800 Subject: [PATCH 0074/1113] Update module refs --- lib/ansible/modules/core | 2 +- lib/ansible/modules/extras | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core index 191347676ee..0b5555b62cd 160000 --- a/lib/ansible/modules/core +++ b/lib/ansible/modules/core @@ -1 +1 @@ -Subproject commit 191347676eea08817da3fb237f24cdbf2d16e307 +Subproject commit 0b5555b62cd8d91fb4fa434217671f3acaebbf5a diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras index a10bdd6be94..cbed6420094 160000 --- a/lib/ansible/modules/extras +++ b/lib/ansible/modules/extras @@ -1 +1 @@ -Subproject commit a10bdd6be948d3aa5fad7ff4959908d6e78e0528 +Subproject commit cbed642009497ddaf19b5f578ab6c78da1356eda From 64864829c4a858e296b049075675e960de678690 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Wed, 9 Dec 2015 12:37:56 -0800 Subject: [PATCH 0075/1113] changed deprecation to removal warning --- lib/ansible/inventory/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/inventory/__init__.py b/lib/ansible/inventory/__init__.py index d7d0f03fb1f..3c1331e7065 100644 --- a/lib/ansible/inventory/__init__.py +++ b/lib/ansible/inventory/__init__.py @@ -388,7 +388,7 @@ class Inventory(object): end = -1 subscript = (int(start), int(end)) if sep == '-': - display.deprecated("Use [x:y] inclusive subscripts instead of [x-y]", version=2.0, removed=True) + display.warning("Use [x:y] inclusive subscripts instead of [x-y] which has been removed") return (pattern, subscript) From 07bf4d9ac4899eb2e0e8246530ff2ca3ee75f3ef Mon Sep 17 00:00:00 2001 From: nitzmahone Date: Wed, 9 Dec 2015 15:48:53 -0500 Subject: [PATCH 0076/1113] added winrm CP notes to changelog --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3d31ef4ebb2..2bf11e6c5bc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -85,6 +85,8 @@ newline being stripped you can change your playbook like this: ###Plugins * Rewritten dnf module that should be faster and less prone to encountering bugs in cornercases +* WinRM connection plugin passes all vars named `ansible_winrm_*` to the underlying pywinrm client. 
This allows, for instance, `ansible_winrm_server_cert_validation=ignore` to be used with newer versions of pywinrm to disable certificate validation on Python 2.7.9+. +* WinRM connection plugin put_file is significantly faster and no longer has file size limitations. ####Deprecated Modules (new ones in parens): From c0d79cf7e10da157ae1b28283ab7b564baee7b51 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Wed, 9 Dec 2015 13:07:00 -0800 Subject: [PATCH 0077/1113] Remove the funcd connection plugin --- lib/ansible/plugins/connection/funcd.py | 99 ------------------------- 1 file changed, 99 deletions(-) delete mode 100644 lib/ansible/plugins/connection/funcd.py diff --git a/lib/ansible/plugins/connection/funcd.py b/lib/ansible/plugins/connection/funcd.py deleted file mode 100644 index 4c9e09be65c..00000000000 --- a/lib/ansible/plugins/connection/funcd.py +++ /dev/null @@ -1,99 +0,0 @@ -# Based on local.py (c) 2012, Michael DeHaan -# Based on chroot.py (c) 2013, Maykel Moya -# (c) 2013, Michael Scherer -# -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. If not, see . - -# --- -# The func transport permit to use ansible over func. For people who have already setup -# func and that wish to play with ansible, this permit to move gradually to ansible -# without having to redo completely the setup of the network. 
-from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -HAVE_FUNC=False -try: - import func.overlord.client as fc - HAVE_FUNC=True -except ImportError: - pass - -import os -from ansible.callbacks import vvv -from ansible import errors -import tempfile -import shutil - - -class Connection(object): - ''' Func-based connections ''' - - def __init__(self, runner, host, port, *args, **kwargs): - self.runner = runner - self.host = host - self.has_pipelining = False - # port is unused, this go on func - self.port = port - - def connect(self, port=None): - if not HAVE_FUNC: - raise errors.AnsibleError("func is not installed") - - self.client = fc.Client(self.host) - return self - - def exec_command(self, cmd, become_user=None, sudoable=False, - executable='/bin/sh', in_data=None): - ''' run a command on the remote minion ''' - - if in_data: - raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining") - - # totally ignores privlege escalation - vvv("EXEC %s" % (cmd), host=self.host) - p = self.client.command.run(cmd)[self.host] - return (p[0], p[1], p[2]) - - def _normalize_path(self, path, prefix): - if not path.startswith(os.path.sep): - path = os.path.join(os.path.sep, path) - normpath = os.path.normpath(path) - return os.path.join(prefix, normpath[1:]) - - def put_file(self, in_path, out_path): - ''' transfer a file from local to remote ''' - - out_path = self._normalize_path(out_path, '/') - vvv("PUT %s TO %s" % (in_path, out_path), host=self.host) - self.client.local.copyfile.send(in_path, out_path) - - def fetch_file(self, in_path, out_path): - ''' fetch a file from remote to local ''' - - in_path = self._normalize_path(in_path, '/') - vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host) - # need to use a tmp dir due to difference of semantic for getfile - # ( who take a # directory as destination) and fetch_file, who - # take a file directly - tmpdir = tempfile.mkdtemp(prefix="func_ansible") - self.client.local.getfile.get(in_path, tmpdir) - shutil.move(os.path.join(tmpdir, self.host, os.path.basename(in_path)), - out_path) - shutil.rmtree(tmpdir) - - def close(self): - ''' terminate the connection; nothing to do here ''' - pass From 18ac12aee60b0033d4b8af4a78ddbd55335c2991 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Yannig=20Perr=C3=A9?= Date: Wed, 9 Dec 2015 22:08:30 +0100 Subject: [PATCH 0078/1113] Do not fail when variable is not correct in debug action. See https://github.com/ansible/ansible/issues/13484 for more information. 
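
Ansible's Templar wraps Jinja2, so the behaviour change can be pictured with plain Jinja2: a rough stand-alone sketch of strict versus lenient undefined handling, not the actual debug code path, and the variable name is made up::

    from jinja2 import Environment, StrictUndefined

    src = "{{ some_var_that_is_not_defined }}"

    print(Environment().from_string(src).render())          # lenient: renders as empty text
    try:
        Environment(undefined=StrictUndefined).from_string(src).render()
    except Exception as e:
        print("strict templating raises: %s" % e)            # roughly what `debug: var=...` hit before
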
--- lib/ansible/plugins/action/debug.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/plugins/action/debug.py b/lib/ansible/plugins/action/debug.py index 1d8e28c7a4a..a0ffb714044 100644 --- a/lib/ansible/plugins/action/debug.py +++ b/lib/ansible/plugins/action/debug.py @@ -40,7 +40,7 @@ class ActionModule(ActionBase): result['msg'] = self._task.args['msg'] # FIXME: move the LOOKUP_REGEX somewhere else elif 'var' in self._task.args: # and not utils.LOOKUP_REGEX.search(self._task.args['var']): - results = self._templar.template(self._task.args['var'], convert_bare=True) + results = self._templar.template(self._task.args['var'], convert_bare=True, fail_on_undefined=False) if type(self._task.args['var']) in (list, dict): # If var is a list or dict, use the type as key to display result[to_unicode(type(self._task.args['var']))] = results From a7cd41b482dc6bf1bf1073e451aa1b38526dde08 Mon Sep 17 00:00:00 2001 From: nitzmahone Date: Wed, 9 Dec 2015 16:29:39 -0500 Subject: [PATCH 0079/1113] Windows doc updates --- docsite/rst/intro_windows.rst | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/docsite/rst/intro_windows.rst b/docsite/rst/intro_windows.rst index e5cbb94fafd..1adcc35010f 100644 --- a/docsite/rst/intro_windows.rst +++ b/docsite/rst/intro_windows.rst @@ -31,7 +31,7 @@ On a Linux control machine:: Active Directory Support ++++++++++++++++++++++++ -If you wish to connect to domain accounts published through Active Directory (as opposed to local accounts created on the remote host), you will need to install the "python-kerberos" module and the MIT krb5 libraries it depends on. +If you wish to connect to domain accounts published through Active Directory (as opposed to local accounts created on the remote host), you will need to install the "python-kerberos" module on the Ansible control host (and the MIT krb5 libraries it depends on). The Ansible control host also requires a properly configured computer account in Active Directory. Installing python-kerberos dependencies --------------------------------------- @@ -131,7 +131,9 @@ To test this, ping the windows host you want to control by name then use the ip If you get different hostnames back than the name you originally pinged, speak to your active directory administrator and get them to check that DNS Scavenging is enabled and that DNS and DHCP are updating each other. -Check your ansible controller's clock is synchronised with your domain controller. Kerberos is time sensitive and a little clock drift can cause tickets not be granted. +Ensure that the Ansible controller has a properly configured computer account in the domain. + +Check your Ansible controller's clock is synchronised with your domain controller. Kerberos is time sensitive and a little clock drift can cause tickets not be granted. Check you are using the real fully qualified domain name for the domain. Sometimes domains are commonly known to users by aliases. To check this run: @@ -165,6 +167,8 @@ In group_vars/windows.yml, define the following inventory variables:: ansible_password: SecretPasswordGoesHere ansible_port: 5986 ansible_connection: winrm + # The following is necessary for Python 2.7.9+ when using default WinRM self-signed certificates: + ansible_winrm_server_cert_validation: ignore Although Ansible is mostly an SSH-oriented system, Windows management will not happen over SSH (`yet `). 
@@ -189,6 +193,7 @@ Since 2.0, the following custom inventory variables are also supported for addit * ``ansible_winrm_path``: Specify an alternate path to the WinRM endpoint. Ansible uses ``/wsman`` by default. * ``ansible_winrm_realm``: Specify the realm to use for Kerberos authentication. If the username contains ``@``, Ansible will use the part of the username after ``@`` by default. * ``ansible_winrm_transport``: Specify one or more transports as a comma-separated list. By default, Ansible will use ``kerberos,plaintext`` if the ``kerberos`` module is installed and a realm is defined, otherwise ``plaintext``. +* ``ansible_winrm_server_cert_validation``: Specify the server certificate validation mode (``ignore`` or ``validate``). Ansible defaults to ``validate`` on Python 2.7.9 and higher, which will result in certificate validation errors against the Windows self-signed certificates. Unless verifiable certificates have been configured on the WinRM listeners, this should be set to ``ignore`` * ``ansible_winrm_*``: Any additional keyword arguments supported by ``winrm.Protocol`` may be provided. .. _windows_system_prep: @@ -221,7 +226,7 @@ Getting to PowerShell 3.0 or higher PowerShell 3.0 or higher is needed for most provided Ansible modules for Windows, and is also required to run the above setup script. Note that PowerShell 3.0 is only supported on Windows 7 SP1, Windows Server 2008 SP1, and later releases of Windows. -Looking at an ansible checkout, copy the `examples/scripts/upgrade_to_ps3.ps1 `_ script onto the remote host and run a PowerShell console as an administrator. You will now be running PowerShell 3 and can try connectivity again using the win_ping technique referenced above. +Looking at an Ansible checkout, copy the `examples/scripts/upgrade_to_ps3.ps1 `_ script onto the remote host and run a PowerShell console as an administrator. You will now be running PowerShell 3 and can try connectivity again using the win_ping technique referenced above. .. _what_windows_modules_are_available: @@ -248,10 +253,10 @@ Note there are a few other Ansible modules that don't start with "win" that also Developers: Supported modules and how it works `````````````````````````````````````````````` -Developing ansible modules are covered in a `later section of the documentation `_, with a focus on Linux/Unix. -What if you want to write Windows modules for ansible though? +Developing Ansible modules are covered in a `later section of the documentation `_, with a focus on Linux/Unix. +What if you want to write Windows modules for Ansible though? -For Windows, ansible modules are implemented in PowerShell. Skim those Linux/Unix module development chapters before proceeding. +For Windows, Ansible modules are implemented in PowerShell. Skim those Linux/Unix module development chapters before proceeding. Windows modules live in a "windows/" subfolder in the Ansible "library/" subtree. For example, if a module is named "library/windows/win_ping", there will be embedded documentation in the "win_ping" file, and the actual PowerShell code will live in a "win_ping.ps1" file. Take a look at the sources and this will make more sense. 
@@ -351,7 +356,7 @@ form of new modules, tweaks to existing modules, documentation, or something els :doc:`developing_modules` How to write modules :doc:`playbooks` - Learning ansible's configuration management language + Learning Ansible's configuration management language `List of Windows Modules `_ Windows specific module list, all implemented in PowerShell `Mailing List `_ From 62cbc03af6410df2b9c61a5056f71a51dd2570ec Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Wed, 9 Dec 2015 13:29:53 -0800 Subject: [PATCH 0080/1113] Revert "Remove the funcd connection plugin" This reverts commit c0d79cf7e10da157ae1b28283ab7b564baee7b51. We may still port the funcd connection plugin, just not in time for 2.0.0 --- lib/ansible/plugins/connection/funcd.py | 99 +++++++++++++++++++++++++ 1 file changed, 99 insertions(+) create mode 100644 lib/ansible/plugins/connection/funcd.py diff --git a/lib/ansible/plugins/connection/funcd.py b/lib/ansible/plugins/connection/funcd.py new file mode 100644 index 00000000000..4c9e09be65c --- /dev/null +++ b/lib/ansible/plugins/connection/funcd.py @@ -0,0 +1,99 @@ +# Based on local.py (c) 2012, Michael DeHaan +# Based on chroot.py (c) 2013, Maykel Moya +# (c) 2013, Michael Scherer +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see . + +# --- +# The func transport permit to use ansible over func. For people who have already setup +# func and that wish to play with ansible, this permit to move gradually to ansible +# without having to redo completely the setup of the network. 
+from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +HAVE_FUNC=False +try: + import func.overlord.client as fc + HAVE_FUNC=True +except ImportError: + pass + +import os +from ansible.callbacks import vvv +from ansible import errors +import tempfile +import shutil + + +class Connection(object): + ''' Func-based connections ''' + + def __init__(self, runner, host, port, *args, **kwargs): + self.runner = runner + self.host = host + self.has_pipelining = False + # port is unused, this go on func + self.port = port + + def connect(self, port=None): + if not HAVE_FUNC: + raise errors.AnsibleError("func is not installed") + + self.client = fc.Client(self.host) + return self + + def exec_command(self, cmd, become_user=None, sudoable=False, + executable='/bin/sh', in_data=None): + ''' run a command on the remote minion ''' + + if in_data: + raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining") + + # totally ignores privlege escalation + vvv("EXEC %s" % (cmd), host=self.host) + p = self.client.command.run(cmd)[self.host] + return (p[0], p[1], p[2]) + + def _normalize_path(self, path, prefix): + if not path.startswith(os.path.sep): + path = os.path.join(os.path.sep, path) + normpath = os.path.normpath(path) + return os.path.join(prefix, normpath[1:]) + + def put_file(self, in_path, out_path): + ''' transfer a file from local to remote ''' + + out_path = self._normalize_path(out_path, '/') + vvv("PUT %s TO %s" % (in_path, out_path), host=self.host) + self.client.local.copyfile.send(in_path, out_path) + + def fetch_file(self, in_path, out_path): + ''' fetch a file from remote to local ''' + + in_path = self._normalize_path(in_path, '/') + vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host) + # need to use a tmp dir due to difference of semantic for getfile + # ( who take a # directory as destination) and fetch_file, who + # take a file directly + tmpdir = tempfile.mkdtemp(prefix="func_ansible") + self.client.local.getfile.get(in_path, tmpdir) + shutil.move(os.path.join(tmpdir, self.host, os.path.basename(in_path)), + out_path) + shutil.rmtree(tmpdir) + + def close(self): + ''' terminate the connection; nothing to do here ''' + pass From a19e083d33ae5ae59be358c9468a4318aca3174f Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Wed, 9 Dec 2015 13:52:01 -0800 Subject: [PATCH 0081/1113] Note that handlers inside of includes are not possible at the moment --- docsite/rst/playbooks_intro.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docsite/rst/playbooks_intro.rst b/docsite/rst/playbooks_intro.rst index e0f1aec5c10..28c809f0132 100644 --- a/docsite/rst/playbooks_intro.rst +++ b/docsite/rst/playbooks_intro.rst @@ -386,6 +386,7 @@ won't need them for much else. * Handler names live in a global namespace. * If two handler tasks have the same name, only one will run. 
`* `_ + * You cannot notify a handler that is defined inside of an include Roles are described later on, but it's worthwhile to point out that: From a61387846d3e210181683a60df14c8e7cbf46893 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Mon, 7 Dec 2015 10:22:07 -0800 Subject: [PATCH 0082/1113] draft release documentation --- docsite/rst/developing_releases.rst | 48 +++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 docsite/rst/developing_releases.rst diff --git a/docsite/rst/developing_releases.rst b/docsite/rst/developing_releases.rst new file mode 100644 index 00000000000..1eeb2421210 --- /dev/null +++ b/docsite/rst/developing_releases.rst @@ -0,0 +1,48 @@ +Releases +======== + +.. contents:: Topics + :local: + +.. schedule:: + +Release Schedule +```````````````` +Ansible is on a 'flexible' 4 month release schedule, sometimes this can be extended if there is a major change that requires a longer cycle (i.e. 2.0 core rewrite). +Currently modules get released at the same time as the main Ansible repo, even though they are separated into ansible-modules-core and ansible-modules-extras. + +The major features and bugs fixed in a release should be reflected in the CHANGELOG.md, minor ones will be in the commit history (FIXME: add git exmaple to list). +When a fix/feature gets added to the `devel` branch it will be part of the next release, some bugfixes can be backported to previous releases and might be part of a minor point release if it is deemed necessary. + +Sometimes an RC can be extended by a few days if a bugfix makes a change that can have far reaching consequences, so users have enough time to find any new issues that may stem from this. + +.. methods:: + +Release methods +```````````````` + +Ansible normally goes through a 'release candidate', issuing an RC1 for a release, if no major bugs are discovered in it after 5 business days we'll get a final release. +Otherwise fixes will be applied and an RC2 will be provided for testing and if no bugs after 2 days, the final release will be made, iterating this last step and incrementing the candidate number as we find major bugs. + + +.. freezing:: + +Release feature freeze +`````````````````````` + +During the release candidate process, the focus will be on bugfixes that affect the RC, new features will be delayed while we try to produce a final version. Some bugfixes that are minor or don't affect the RC will also be postponed until after the release is finalized. + +.. 
seealso:: + + :doc:`developing_api` + Python API to Playbooks and Ad Hoc Task Execution + :doc:`developing_modules` + How to develop modules + :doc:`developing_plugins` + How to develop plugins + `Ansible Tower `_ + REST API endpoint and GUI for Ansible, syncs with dynamic inventory + `Development Mailing List `_ + Mailing list for development topics + `irc.freenode.net `_ + #ansible IRC chat channel From 2b363434514aa94aad145d2a6eacf4c1013490d8 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Wed, 9 Dec 2015 17:57:52 -0500 Subject: [PATCH 0083/1113] Missed one place we were appending the incorrectly escaped item to raw params --- lib/ansible/parsing/splitter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/parsing/splitter.py b/lib/ansible/parsing/splitter.py index f24d8ecf9de..feb0cd2b34b 100644 --- a/lib/ansible/parsing/splitter.py +++ b/lib/ansible/parsing/splitter.py @@ -86,7 +86,7 @@ def parse_kv(args, check_raw=False): # FIXME: make the retrieval of this list of shell/command # options a function, so the list is centralized if check_raw and k not in ('creates', 'removes', 'chdir', 'executable', 'warn'): - raw_params.append(x) + raw_params.append(orig_x) else: options[k.strip()] = unquote(v.strip()) else: From 30e729557f0056ec561288046e2aa933efe899b3 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Wed, 9 Dec 2015 16:43:24 -0800 Subject: [PATCH 0084/1113] Add first draft of porting guide for 2.0 --- docsite/rst/porting_guide_2.0.rst | 160 ++++++++++++++++++++++++++++++ 1 file changed, 160 insertions(+) create mode 100644 docsite/rst/porting_guide_2.0.rst diff --git a/docsite/rst/porting_guide_2.0.rst b/docsite/rst/porting_guide_2.0.rst new file mode 100644 index 00000000000..9c26a4b1611 --- /dev/null +++ b/docsite/rst/porting_guide_2.0.rst @@ -0,0 +1,160 @@ +Porting Guide +============= + + +Playbook +-------- + +* backslash escapes When specifying parameters in jinja2 expressions in YAML + dicts, backslashes sometimes needed to be escaped twice. This has been fixed + in 2.0.x so that escaping once works. The following example shows how + playbooks must be modified:: + + # Syntax in 1.9.x + - debug: + msg: "{{ 'test1_junk 1\\\\3' | regex_replace('(.*)_junk (.*)', '\\\\1 \\\\2') }}" + # Syntax in 2.0.x + - debug: + msg: "{{ 'test1_junk 1\\3' | regex_replace('(.*)_junk (.*)', '\\1 \\2') }}" + + # Output: + "msg": "test1 1\\3" + +To make an escaped string that will work on all versions you have two options:: + +- debug: msg="{{ 'test1_junk 1\\3' | regex_replace('(.*)_junk (.*)', '\\1 \\2') }}" + +uses key=value escaping which has not changed. The other option is to check for the ansible version:: + +"{{ (ansible_version|version_compare('ge', '2.0'))|ternary( 'test1_junk 1\\3' | regex_replace('(.*)_junk (.*)', '\\1 \\2') , 'test1_junk 1\\\\3' | regex_replace('(.*)_junk (.*)', '\\\\1 \\\\2') ) }}" + +* trailing newline When a string with a trailing newline was specified in the + playbook via yaml dict format, the trailing newline was stripped. When + specified in key=value format, the trailing newlines were kept. In v2, both + methods of specifying the string will keep the trailing newlines. 
If you + relied on the trailing newline being stripped, you can change your playbook + using the following as an example:: + + # Syntax in 1.9.x + vars: + message: > + Testing + some things + tasks: + - debug: + msg: "{{ message }}" + + # Syntax in 2.0.x + vars: + old_message: > + Testing + some things + message: "{{ old_messsage[:-1] }}" + - debug: + msg: "{{ message }}" + # Output + "msg": "Testing some things" + +* porting task includes + * More dynamic. Corner-case formats that were not supposed to work now do not, as expected. + * variables defined in the yaml dict format https://github.com/ansible/ansible/issues/13324 + * variable precedence +* templating (variables in playbooks and template lookups) has improved with regard to keeping the original instead of turning everything into a string. + If you need the old behavior, quote the value to pass it around as a string. + Empty variables and variables set to null in yaml are no longer converted to empty strings. They will retain the value of `None`. + You can override the `null_representation` setting to an empty string in your config file by setting the `ANSIBLE_NULL_REPRESENTATION` environment variable. +* Extras callbacks must be whitelisted in ansible.cfg. Copying is no longer necessary but whitelisting in ansible.cfg must be completed. +* dnf module has been rewritten. Some minor changes in behavior may be observed. +* win_updates has been rewritten and works as expected now. + +Deprecated +---------- + +While all items listed here will show a deprecation warning message, they still work as they did in 1.9.x. Please note that they will be removed in 2.2 (Ansible always waits two major releases to remove a deprecated feature). + +* Bare variables in with_ loops should instead use the “{{var}}” syntax, which helps eliminate ambiguity. +* The ansible-galaxy text format requirements file. Users should use the YAML format for requirements instead. +* Undefined variables within a with_ loop’s list currently do not interrupt the loop, but they do issue a warning; in the future, they will issue an error. +* Using variables for task parameters is unsafe and will be removed in a future version. For example:: + + - hosts: localhost + gather_facts: no + vars: + debug_params: + msg: "hello there" + tasks: + - debug: "{{debug_params}}" + +* Host patterns should use a comma (,) or colon (:) instead of a semicolon (;) to separate hosts/groups in the pattern. +* Ranges specified in host patterns should use the [x:y] syntax, instead of [x-y]. +* Playbooks using privilege escalation should always use “become*” options rather than the old su*/sudo* options. +* The “short form” for vars_prompt is no longer supported. +For example:: + +vars_prompt: + variable_name: "Prompt string" + +* Specifying variables at the top level of a task include statement is no longer supported. For example:: + + - include: foo.yml + a: 1 + +Should now be:: + +- include: foo.yml + args: + a: 1 + +* Setting any_errors_fatal on a task is no longer supported. This should be set at the play level only. +* Bare variables in the `environment` dictionary (for plays/tasks/etc.) are no longer supported. Variables specified there should use the full variable syntax: ‘{{foo}}’. +* Tags should no longer be specified with other parameters in a task include. Instead, they should be specified as an option on the task. +For example:: + + - include: foo.yml tags=a,b,c + +Should be:: + + - include: foo.yml + tags: [a, b, c] + +* The first_available_file option on tasks has been deprecated. 
Users should use the with_first_found option or lookup (‘first_found’, …) plugin. + + +Porting plugins +=============== + +In ansible-1.9.x, you would generally copy an existing plugin to create a new one. Simply implementing the methods and attributes that the caller of the plugin expected made it a plugin of that type. In ansible-2.0, most plugins are implemented by subclassing a base class for each plugin type. This way the custom plugin does not need to contain methods which are not customized. + +.. note:: + +Lookup plugins +-------------- +* lookup plugins ; import version + + +Connection plugins +------------------ + +* connection plugins + +Action plugins +-------------- + +* action plugins + +Callback plugins +---------------- + +* callback plugins + +Connection plugins +------------------ + +* connection plugins + + +Porting custom scripts +====================== + +Custom scripts that used the ``ansible.runner.Runner`` API in 1.x have to be ported in 2.x. Please refer to: +https://github.com/ansible/ansible/blob/devel/docsite/rst/developing_api.rst From fe72fff57da967ff0e53c8026bcd94d67cdb59db Mon Sep 17 00:00:00 2001 From: Michael Scherer Date: Thu, 10 Dec 2015 01:58:17 +0100 Subject: [PATCH 0085/1113] Fix the markdown used for the Windows module section --- docsite/rst/developing_modules.rst | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/docsite/rst/developing_modules.rst b/docsite/rst/developing_modules.rst index bdee4aa83dc..fde4b5704b6 100644 --- a/docsite/rst/developing_modules.rst +++ b/docsite/rst/developing_modules.rst @@ -538,24 +538,34 @@ Windows modules checklist #!powershell -then:: + then:: + -then:: + + then:: + # WANT_JSON # POWERSHELL_COMMON -then, to parse all arguments into a variable modules generally use:: + then, to parse all arguments into a variable modules generally use:: + $params = Parse-Args $args * Arguments: * Try and use state present and state absent like other modules * You need to check that all your mandatory args are present. You can do this using the builtin Get-AnsibleParam function. * Required arguments:: + $package = Get-AnsibleParam -obj $params -name name -failifempty $true + * Required arguments with name validation:: + $state = Get-AnsibleParam -obj $params -name "State" -ValidateSet "Present","Absent" -resultobj $resultobj -failifempty $true + * Optional arguments with name validation:: + $state = Get-AnsibleParam -obj $params -name "State" -default "Present" -ValidateSet "Present","Absent" + * the If "FailIfEmpty" is true, the resultobj parameter is used to specify the object returned to fail-json. You can also override the default message using $emptyattributefailmessage (for missing required attributes) and $ValidateSetErrorMessage (for attribute validation errors) * Look at existing modules for more examples of argument checking. @@ -586,7 +596,7 @@ Starting in 1.8 you can deprecate modules by renaming them with a preceding _, i _old_cloud.py, This will keep the module available but hide it from the primary docs and listing. You can also rename modules and keep an alias to the old name by using a symlink that starts with _. 
-This example allows the stat module to be called with fileinfo, making the following examples equivalent +This example allows the stat module to be called with fileinfo, making the following examples equivalent:: EXAMPLES = ''' ln -s stat.py _fileinfo.py From c20c1a6d490933fa2ec8961508735422f3a6adeb Mon Sep 17 00:00:00 2001 From: Robin Roth Date: Thu, 10 Dec 2015 11:16:21 +0100 Subject: [PATCH 0086/1113] add depth option to ansible-pull Allows shallow checkouts in ansible-pull by adding `--depth 1` (or higher number) --- lib/ansible/cli/pull.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/lib/ansible/cli/pull.py b/lib/ansible/cli/pull.py index 593d601e8d4..67e89259303 100644 --- a/lib/ansible/cli/pull.py +++ b/lib/ansible/cli/pull.py @@ -80,6 +80,8 @@ class PullCLI(CLI): help='directory to checkout repository to') self.parser.add_option('-U', '--url', dest='url', default=None, help='URL of the playbook repository') + self.parser.add_option('--depth', dest='depth', default=None, + help='Depth of checkout, shallow checkout if greater or equal 1 . Defaults to full checkout.') self.parser.add_option('-C', '--checkout', dest='checkout', help='branch/tag/commit to checkout. ' 'Defaults to behavior of repository module.') self.parser.add_option('--accept-host-key', default=False, dest='accept_host_key', action='store_true', @@ -154,6 +156,10 @@ class PullCLI(CLI): if self.options.verify: repo_opts += ' verify_commit=yes' + + if self.options.depth: + repo_opts += ' depth=%s' % self.options.depth + path = module_loader.find_plugin(self.options.module_name) if path is None: From 6680cc7052dd4ef5bb166008a18a57e0f156df95 Mon Sep 17 00:00:00 2001 From: Charles Paul Date: Thu, 10 Dec 2015 08:04:06 -0500 Subject: [PATCH 0087/1113] allow custom callbacks with adhoc cli for scripting missing import of CallbackBase --- lib/ansible/cli/__init__.py | 3 ++- lib/ansible/cli/adhoc.py | 4 +++- lib/ansible/executor/task_queue_manager.py | 11 +++++++++-- 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/lib/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py index da1aabcc698..a934a3a8ee5 100644 --- a/lib/ansible/cli/__init__.py +++ b/lib/ansible/cli/__init__.py @@ -66,7 +66,7 @@ class CLI(object): LESS_OPTS = 'FRSX' # -F (quit-if-one-screen) -R (allow raw ansi control chars) # -S (chop long lines) -X (disable termcap init and de-init) - def __init__(self, args): + def __init__(self, args, callback=None): """ Base init method for all command line programs """ @@ -75,6 +75,7 @@ class CLI(object): self.options = None self.parser = None self.action = None + self.callback = callback def set_action(self): """ diff --git a/lib/ansible/cli/adhoc.py b/lib/ansible/cli/adhoc.py index 3de0e55b7bb..250241a848f 100644 --- a/lib/ansible/cli/adhoc.py +++ b/lib/ansible/cli/adhoc.py @@ -158,7 +158,9 @@ class AdHocCLI(CLI): play_ds = self._play_ds(pattern, self.options.seconds, self.options.poll_interval) play = Play().load(play_ds, variable_manager=variable_manager, loader=loader) - if self.options.one_line: + if self.callback: + cb = self.callback + elif self.options.one_line: cb = 'oneline' else: cb = 'minimal' diff --git a/lib/ansible/executor/task_queue_manager.py b/lib/ansible/executor/task_queue_manager.py index 74111382935..e2b29a5282c 100644 --- a/lib/ansible/executor/task_queue_manager.py +++ b/lib/ansible/executor/task_queue_manager.py @@ -34,6 +34,7 @@ from ansible.playbook.play_context import PlayContext from ansible.plugins import callback_loader, strategy_loader, module_loader from 
ansible.template import Templar from ansible.vars.hostvars import HostVars +from ansible.plugins.callback import CallbackBase try: from __main__ import display @@ -146,8 +147,14 @@ class TaskQueueManager: if self._stdout_callback is None: self._stdout_callback = C.DEFAULT_STDOUT_CALLBACK - if self._stdout_callback not in callback_loader: - raise AnsibleError("Invalid callback for stdout specified: %s" % self._stdout_callback) + if isinstance(self._stdout_callback, CallbackBase): + self._callback_plugins.append(self._stdout_callback) + stdout_callback_loaded = True + elif isinstance(self._stdout_callback, basestring): + if self._stdout_callback not in callback_loader: + raise AnsibleError("Invalid callback for stdout specified: %s" % self._stdout_callback) + else: + raise AnsibleError("callback must be an instance of CallbackBase or the name of a callback plugin") for callback_plugin in callback_loader.all(class_only=True): if hasattr(callback_plugin, 'CALLBACK_VERSION') and callback_plugin.CALLBACK_VERSION >= 2.0: From 72f0679f685dc6c79fe80736d2ca72f6778b8e5b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Warcho=C5=82?= Date: Thu, 10 Dec 2015 16:22:37 +0100 Subject: [PATCH 0088/1113] Explain how 'run_once' interacts with 'serial' --- docsite/rst/playbooks_delegation.rst | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/docsite/rst/playbooks_delegation.rst b/docsite/rst/playbooks_delegation.rst index c715adea361..fa808abb65b 100644 --- a/docsite/rst/playbooks_delegation.rst +++ b/docsite/rst/playbooks_delegation.rst @@ -182,13 +182,18 @@ This can be optionally paired with "delegate_to" to specify an individual host t delegate_to: web01.example.org When "run_once" is not used with "delegate_to" it will execute on the first host, as defined by inventory, -in the group(s) of hosts targeted by the play. e.g. webservers[0] if the play targeted "hosts: webservers". +in the group(s) of hosts targeted by the play - e.g. webservers[0] if the play targeted "hosts: webservers". -This approach is similar, although more concise and cleaner than applying a conditional to a task such as:: +This approach is similar to applying a conditional to a task such as:: - command: /opt/application/upgrade_db.py when: inventory_hostname == webservers[0] +.. note:: + When used together with "serial", tasks marked as "run_once" will be ran on one host in *each* serial batch. + If it's crucial that the task is run only once regardless of "serial" mode, use + :code:`inventory_hostname == my_group_name[0]` construct. + .. _local_playbooks: Local Playbooks From 1dda8158ff9aa5240e89711c7279c3d072e0e57e Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Thu, 10 Dec 2015 07:28:58 -0800 Subject: [PATCH 0089/1113] become_pass needs to be bytes when it is passed to ssh. 
Fixes #13240 --- lib/ansible/plugins/connection/ssh.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/plugins/connection/ssh.py b/lib/ansible/plugins/connection/ssh.py index debe36bd320..4251f8a63e8 100644 --- a/lib/ansible/plugins/connection/ssh.py +++ b/lib/ansible/plugins/connection/ssh.py @@ -463,7 +463,7 @@ class Connection(ConnectionBase): if states[state] == 'awaiting_prompt': if self._flags['become_prompt']: display.debug('Sending become_pass in response to prompt') - stdin.write(self._play_context.become_pass + '\n') + stdin.write('{0}\n'.format(to_bytes(self._play_context.become_pass ))) self._flags['become_prompt'] = False state += 1 elif self._flags['become_success']: From bd9582d0721db3c6e5e24b08c747e02a6391a0a7 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Thu, 10 Dec 2015 08:10:45 -0800 Subject: [PATCH 0090/1113] Update submodule refs --- lib/ansible/modules/core | 2 +- lib/ansible/modules/extras | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core index 0b5555b62cd..0d23b3df526 160000 --- a/lib/ansible/modules/core +++ b/lib/ansible/modules/core @@ -1 +1 @@ -Subproject commit 0b5555b62cd8d91fb4fa434217671f3acaebbf5a +Subproject commit 0d23b3df526875c8fc6edf94268f3aa850ec05f1 diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras index cbed6420094..51813e00333 160000 --- a/lib/ansible/modules/extras +++ b/lib/ansible/modules/extras @@ -1 +1 @@ -Subproject commit cbed642009497ddaf19b5f578ab6c78da1356eda +Subproject commit 51813e003331c3341b07c5cda33346cada537a3b From c402325085c129ce289c73a808d8d6ac68df096d Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Thu, 10 Dec 2015 13:10:17 -0500 Subject: [PATCH 0091/1113] Fixing up docker integration tests a bit --- .../roles/test_docker/tasks/docker-tests.yml | 31 +++---------------- .../test_docker/tasks/registry-tests.yml | 11 ++----- 2 files changed, 8 insertions(+), 34 deletions(-) diff --git a/test/integration/roles/test_docker/tasks/docker-tests.yml b/test/integration/roles/test_docker/tasks/docker-tests.yml index 33ffe6c70ca..14e23f72dd5 100644 --- a/test/integration/roles/test_docker/tasks/docker-tests.yml +++ b/test/integration/roles/test_docker/tasks/docker-tests.yml @@ -8,7 +8,6 @@ image: busybox state: present pull: missing - docker_api_version: "1.14" - name: Run a small script in busybox docker: @@ -17,22 +16,12 @@ pull: always command: "nc -l -p 2000 -e xargs -n1 echo hello" detach: True - docker_api_version: "1.14" - -- name: Get the docker container id - shell: "docker ps | grep busybox | awk '{ print $1 }'" - register: container_id - name: Get the docker container ip - shell: "docker inspect {{ container_id.stdout_lines[0] }} | grep IPAddress | awk -F '\"' '{ print $4 }'" - register: container_ip - -- name: Pause a few moments because docker is not reliable - pause: - seconds: 40 + set_fact: container_ip="{{docker_containers[0].NetworkSettings.IPAddress}}" - name: Try to access the server - shell: "echo 'world' | nc {{ container_ip.stdout_lines[0] }} 2000" + shell: "echo 'world' | nc {{ container_ip }} 2000" register: docker_output - name: check that the script ran @@ -49,22 +38,12 @@ TEST: hello command: '/bin/sh -c "nc -l -p 2000 -e xargs -n1 echo $TEST"' detach: True - docker_api_version: "1.14" - -- name: Get the docker container id - shell: "docker ps | grep busybox | awk '{ print $1 }'" - register: container_id - name: Get the docker container ip - shell: "docker inspect {{ 
container_id.stdout_lines[0] }} | grep IPAddress | awk -F '\"' '{ print $4 }'" - register: container_ip - -- name: Pause a few moments because docker is not reliable - pause: - seconds: 40 + set_fact: container_ip="{{docker_containers[0].NetworkSettings.IPAddress}}" - name: Try to access the server - shell: "echo 'world' | nc {{ container_ip.stdout_lines[0] }} 2000" + shell: "echo 'world' | nc {{ container_ip }} 2000" register: docker_output - name: check that the script ran @@ -73,7 +52,7 @@ - "'hello world' in docker_output.stdout_lines" - name: Remove containers - shell: "docker rm $(docker ps -aq)" + shell: "docker rm -f $(docker ps -aq)" - name: Remove all images from the local docker shell: "docker rmi -f $(docker images -q)" diff --git a/test/integration/roles/test_docker/tasks/registry-tests.yml b/test/integration/roles/test_docker/tasks/registry-tests.yml index 57b4d252774..1ef330da5f6 100644 --- a/test/integration/roles/test_docker/tasks/registry-tests.yml +++ b/test/integration/roles/test_docker/tasks/registry-tests.yml @@ -19,11 +19,8 @@ - name: Push docker image into the private registry command: "docker push localhost:5000/mine" -- name: Remove containers - shell: "docker rm $(docker ps -aq)" - - name: Remove all images from the local docker - shell: "docker rmi -f $(docker images -q)" + shell: "docker rmi -f {{image_id.stdout_lines[0]}}" - name: Get number of images in docker command: "docker images" @@ -41,7 +38,6 @@ state: present pull: missing insecure_registry: True - docker_api_version: "1.14" - name: Run a small script in the new image docker: @@ -51,7 +47,6 @@ command: "nc -l -p 2000 -e xargs -n1 echo hello" detach: True insecure_registry: True - docker_api_version: "1.14" - name: Get the docker container id shell: "docker ps | grep mine | awk '{ print $1 }'" @@ -76,8 +71,9 @@ - name: Remove containers - shell: "docker rm $(docker ps -aq)" + shell: "docker rm -f $(docker ps -aq)" +- shell: docker images -q - name: Remove all images from the local docker shell: "docker rmi -f $(docker images -q)" @@ -157,7 +153,6 @@ state: running command: "nc -l -p 2000 -e xargs -n1 echo hello" detach: True - docker_api_version: "1.14" - name: Get the docker container id shell: "docker ps | grep mine | awk '{ print $1 }'" From a6a58d6947912328fd48e26ea1335bd9314f0135 Mon Sep 17 00:00:00 2001 From: Charles Paul Date: Thu, 10 Dec 2015 16:39:27 -0500 Subject: [PATCH 0092/1113] fix default host for non vcd service types --- lib/ansible/module_utils/vca.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/ansible/module_utils/vca.py b/lib/ansible/module_utils/vca.py index 56341ec5559..ef89d545569 100644 --- a/lib/ansible/module_utils/vca.py +++ b/lib/ansible/module_utils/vca.py @@ -108,7 +108,10 @@ class VcaAnsibleModule(AnsibleModule): def create_instance(self): service_type = self.params.get('service_type', DEFAULT_SERVICE_TYPE) - host = self.params.get('host', LOGIN_HOST.get('service_type')) + if service_type == 'vcd': + host = self.params['host'] + else: + host = LOGIN_HOST[service_type] username = self.params['username'] version = self.params.get('api_version') From 37c4e9aee34df2f421942e86c8afd1fef2bee5f6 Mon Sep 17 00:00:00 2001 From: Abhijit Menon-Sen Date: Fri, 11 Dec 2015 07:11:48 +0530 Subject: [PATCH 0093/1113] Clean up debug logging around _low_level_execute_command We were logging the command to be executed many times, which made debug logs very hard to read. Now we do it only once. 
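The pattern being removed is visible in the diff below: each call site wrapped the helper in its own pair of ``display.debug`` lines, so the same command could appear in the log several times per task. Centralising the logging inside the helper leaves a single entry per execution. A condensed sketch of the change (the call sites shown are representative, not exhaustive)::

    # Before: each caller logged around the helper, duplicating output.
    display.debug("calling _low_level_execute_command to create the tmp path")
    result = self._low_level_execute_command(cmd, sudoable=False)
    display.debug("done with creation of tmp path")

    # After: callers stay quiet; _low_level_execute_command() logs once itself.
    result = self._low_level_execute_command(cmd, sudoable=False)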
Also makes the logged ssh command line cut-and-paste-able (the lack of which has confused a number of people by now; the problem being that we pass the command as a single argument to execve(), so it doesn't need an extra level of quoting as it does when you try to run it by hand). --- lib/ansible/plugins/action/__init__.py | 25 ++++++------------------- lib/ansible/plugins/connection/ssh.py | 2 +- 2 files changed, 7 insertions(+), 20 deletions(-) diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py index 497143224a7..154404e474c 100644 --- a/lib/ansible/plugins/action/__init__.py +++ b/lib/ansible/plugins/action/__init__.py @@ -202,9 +202,7 @@ class ActionBase(with_metaclass(ABCMeta, object)): tmp_mode = 0o755 cmd = self._connection._shell.mkdtemp(basefile, use_system_tmp, tmp_mode) - display.debug("executing _low_level_execute_command to create the tmp path") result = self._low_level_execute_command(cmd, sudoable=False) - display.debug("done with creation of tmp path") # error handling on this seems a little aggressive? if result['rc'] != 0: @@ -249,9 +247,7 @@ class ActionBase(with_metaclass(ABCMeta, object)): cmd = self._connection._shell.remove(tmp_path, recurse=True) # If we have gotten here we have a working ssh configuration. # If ssh breaks we could leave tmp directories out on the remote system. - display.debug("calling _low_level_execute_command to remove the tmp path") self._low_level_execute_command(cmd, sudoable=False) - display.debug("done removing the tmp path") def _transfer_data(self, remote_path, data): ''' @@ -286,9 +282,7 @@ class ActionBase(with_metaclass(ABCMeta, object)): ''' cmd = self._connection._shell.chmod(mode, path) - display.debug("calling _low_level_execute_command to chmod the remote path") res = self._low_level_execute_command(cmd, sudoable=sudoable) - display.debug("done with chmod call") return res def _remote_checksum(self, path, all_vars): @@ -299,9 +293,7 @@ class ActionBase(with_metaclass(ABCMeta, object)): python_interp = all_vars.get('ansible_python_interpreter', 'python') cmd = self._connection._shell.checksum(path, python_interp) - display.debug("calling _low_level_execute_command to get the remote checksum") data = self._low_level_execute_command(cmd, sudoable=True) - display.debug("done getting the remote checksum") try: data2 = data['stdout'].strip().splitlines()[-1] if data2 == u'': @@ -329,9 +321,7 @@ class ActionBase(with_metaclass(ABCMeta, object)): expand_path = '~%s' % self._play_context.become_user cmd = self._connection._shell.expand_user(expand_path) - display.debug("calling _low_level_execute_command to expand the remote user path") data = self._low_level_execute_command(cmd, sudoable=False) - display.debug("done expanding the remote user path") #initial_fragment = utils.last_non_blank_line(data['stdout']) initial_fragment = data['stdout'].strip().splitlines()[-1] @@ -448,9 +438,7 @@ class ActionBase(with_metaclass(ABCMeta, object)): # specified in the play, not the sudo_user sudoable = False - display.debug("calling _low_level_execute_command() for command %s" % cmd) res = self._low_level_execute_command(cmd, sudoable=sudoable, in_data=in_data) - display.debug("_low_level_execute_command returned ok") if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp: if self._play_context.become and self._play_context.become_user != 'root': @@ -498,21 +486,20 @@ class ActionBase(with_metaclass(ABCMeta, object)): if executable is not None: cmd = executable 
+ ' -c ' + cmd - display.debug("in _low_level_execute_command() (%s)" % (cmd,)) + display.debug("_low_level_execute_command(): starting") if not cmd: # this can happen with powershell modules when there is no analog to a Windows command (like chmod) - display.debug("no command, exiting _low_level_execute_command()") + display.debug("_low_level_execute_command(): no command, exiting") return dict(stdout='', stderr='') allow_same_user = C.BECOME_ALLOW_SAME_USER same_user = self._play_context.become_user == self._play_context.remote_user if sudoable and self._play_context.become and (allow_same_user or not same_user): - display.debug("using become for this command") + display.debug("_low_level_execute_command(): using become for this command") cmd = self._play_context.make_become_cmd(cmd, executable=executable) - display.debug("executing the command %s through the connection" % cmd) + display.debug("_low_level_execute_command(): executing: %s" % (cmd,)) rc, stdout, stderr = self._connection.exec_command(cmd, in_data=in_data, sudoable=sudoable) - display.debug("command execution done: rc=%s" % (rc)) # stdout and stderr may be either a file-like or a bytes object. # Convert either one to a text type @@ -530,11 +517,11 @@ class ActionBase(with_metaclass(ABCMeta, object)): else: err = stderr - display.debug("stdout=%s, stderr=%s" % (stdout, stderr)) - display.debug("done with _low_level_execute_command() (%s)" % (cmd,)) if rc is None: rc = 0 + display.debug("_low_level_execute_command() done: rc=%d, stdout=%s, stderr=%s" % (rc, stdout, stderr)) + return dict(rc=rc, stdout=out, stdout_lines=out.splitlines(), stderr=err) def _get_first_available_file(self, faf, of=None, searchdir='files'): diff --git a/lib/ansible/plugins/connection/ssh.py b/lib/ansible/plugins/connection/ssh.py index 4251f8a63e8..a2abcf20aee 100644 --- a/lib/ansible/plugins/connection/ssh.py +++ b/lib/ansible/plugins/connection/ssh.py @@ -319,7 +319,7 @@ class Connection(ConnectionBase): Starts the command and communicates with it until it ends. ''' - display_cmd = map(pipes.quote, cmd[:-1]) + [cmd[-1]] + display_cmd = map(pipes.quote, cmd) display.vvv('SSH: EXEC {0}'.format(' '.join(display_cmd)), host=self.host) # Start the given command. 
If we don't need to pipeline data, we can try From bd0f9a4afc8406f71d65c50cda35a43549998fc1 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Thu, 10 Dec 2015 21:50:11 -0500 Subject: [PATCH 0094/1113] fix make complaint when git is not installed --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index ac4c07f4314..f62cffb2df8 100644 --- a/Makefile +++ b/Makefile @@ -44,7 +44,7 @@ GIT_HASH := $(shell git log -n 1 --format="%h") GIT_BRANCH := $(shell git rev-parse --abbrev-ref HEAD | sed 's/[-_.\/]//g') GITINFO = .$(GIT_HASH).$(GIT_BRANCH) else -GITINFO = '' +GITINFO = "" endif ifeq ($(shell echo $(OS) | egrep -c 'Darwin|FreeBSD|OpenBSD'),1) From 58072c92fb762881679c31d050d519ccd83cb209 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Fri, 11 Dec 2015 09:32:19 -0500 Subject: [PATCH 0095/1113] removed 'bare' example in environment now shows how to use explicit templating --- docsite/rst/playbooks_environment.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docsite/rst/playbooks_environment.rst b/docsite/rst/playbooks_environment.rst index da050f007d5..f909bfcd6e6 100644 --- a/docsite/rst/playbooks_environment.rst +++ b/docsite/rst/playbooks_environment.rst @@ -31,7 +31,7 @@ The environment can also be stored in a variable, and accessed like so:: tasks: - apt: name=cobbler state=installed - environment: proxy_env + environment: "{{proxy_env}}" You can also use it at a playbook level:: From d9e510b19273d6a495e6694b6930e49de80f9500 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Fri, 11 Dec 2015 13:12:24 -0500 Subject: [PATCH 0096/1113] narrow down exception catching in block builds this was obscuring other errors and should have always been narrow scope --- lib/ansible/playbook/role/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/playbook/role/__init__.py b/lib/ansible/playbook/role/__init__.py index 1c6b344a4fc..f308954f528 100644 --- a/lib/ansible/playbook/role/__init__.py +++ b/lib/ansible/playbook/role/__init__.py @@ -150,7 +150,7 @@ class Role(Base, Become, Conditional, Taggable): current_when = getattr(self, 'when')[:] current_when.extend(role_include.when) setattr(self, 'when', current_when) - + current_tags = getattr(self, 'tags')[:] current_tags.extend(role_include.tags) setattr(self, 'tags', current_tags) @@ -174,7 +174,7 @@ class Role(Base, Become, Conditional, Taggable): if task_data: try: self._task_blocks = load_list_of_blocks(task_data, play=self._play, role=self, loader=self._loader) - except: + except AssertionError: raise AnsibleParserError("The tasks/main.yml file for role '%s' must contain a list of tasks" % self._role_name , obj=task_data) handler_data = self._load_role_yaml('handlers') From 97554fc222628057d7f3255ce2caac8dfe5d783f Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Fri, 11 Dec 2015 00:18:47 -0500 Subject: [PATCH 0097/1113] Fixing filter test for extract to use proper group --- test/integration/roles/test_filters/tasks/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/roles/test_filters/tasks/main.yml b/test/integration/roles/test_filters/tasks/main.yml index af6c5d49def..cb1549d3f78 100644 --- a/test/integration/roles/test_filters/tasks/main.yml +++ b/test/integration/roles/test_filters/tasks/main.yml @@ -77,4 +77,4 @@ - "31 == ['x','y']|map('extract',{'x':42,'y':31})|list|last" - "'local' == ['localhost']|map('extract',hostvars,'ansible_connection')|list|first" - "'local' == 
['localhost']|map('extract',hostvars,['ansible_connection'])|list|first" - - "'ungrouped' == ['localhost']|map('extract',hostvars,['vars','group_names',0])|list|first" + - "'amazon' == ['localhost']|map('extract',hostvars,['vars','group_names',0])|list|first" From 7f7e730dea36dbb709b47c39ca1a28cb9f6cb3f1 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Fri, 11 Dec 2015 14:55:44 -0500 Subject: [PATCH 0098/1113] Don't mark hosts failed if they've moved to a rescue portion of a block Fixes #13521 --- lib/ansible/plugins/strategy/__init__.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/lib/ansible/plugins/strategy/__init__.py b/lib/ansible/plugins/strategy/__init__.py index 15636b580d1..91ca4e86383 100644 --- a/lib/ansible/plugins/strategy/__init__.py +++ b/lib/ansible/plugins/strategy/__init__.py @@ -30,6 +30,11 @@ from jinja2.exceptions import UndefinedError from ansible import constants as C from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable +<<<<<<< Updated upstream +======= +from ansible.executor.play_iterator import PlayIterator +from ansible.executor.process.worker import WorkerProcess +>>>>>>> Stashed changes from ansible.executor.task_result import TaskResult from ansible.inventory.host import Host from ansible.inventory.group import Group @@ -202,8 +207,10 @@ class StrategyBase: [iterator.mark_host_failed(h) for h in self._inventory.get_hosts(iterator._play.hosts) if h.name not in self._tqm._unreachable_hosts] else: iterator.mark_host_failed(host) - self._tqm._failed_hosts[host.name] = True - self._tqm._stats.increment('failures', host.name) + (state, tmp_task) = iterator.get_next_task_for_host(host, peek=True) + if state.run_state != PlayIterator.ITERATING_RESCUE: + self._tqm._failed_hosts[host.name] = True + self._tqm._stats.increment('failures', host.name) else: self._tqm._stats.increment('ok', host.name) self._tqm.send_callback('v2_runner_on_failed', task_result, ignore_errors=task.ignore_errors) From de71171fc21a81a343eb28ed25472ef4aa17406c Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Fri, 11 Dec 2015 15:10:48 -0500 Subject: [PATCH 0099/1113] removed merge conflict --- lib/ansible/plugins/strategy/__init__.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/lib/ansible/plugins/strategy/__init__.py b/lib/ansible/plugins/strategy/__init__.py index 91ca4e86383..5d31a3dba8d 100644 --- a/lib/ansible/plugins/strategy/__init__.py +++ b/lib/ansible/plugins/strategy/__init__.py @@ -30,11 +30,7 @@ from jinja2.exceptions import UndefinedError from ansible import constants as C from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable -<<<<<<< Updated upstream -======= from ansible.executor.play_iterator import PlayIterator -from ansible.executor.process.worker import WorkerProcess ->>>>>>> Stashed changes from ansible.executor.task_result import TaskResult from ansible.inventory.host import Host from ansible.inventory.group import Group From ae988ed753f69cb2a7bf115c7cee41e53f01ef3e Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Fri, 11 Dec 2015 15:35:57 -0500 Subject: [PATCH 0100/1113] avoid set to unique hosts to preserver order swiched to using a list comp and set to still unique but keep expected order fixes #13522 --- lib/ansible/inventory/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/ansible/inventory/__init__.py b/lib/ansible/inventory/__init__.py index 3c1331e7065..95e193f381a 100644 --- a/lib/ansible/inventory/__init__.py +++ 
b/lib/ansible/inventory/__init__.py @@ -194,7 +194,8 @@ class Inventory(object): if self._restriction is not None: hosts = [ h for h in hosts if h in self._restriction ] - HOSTS_PATTERNS_CACHE[pattern_hash] = list(set(hosts)) + seen = set() + HOSTS_PATTERNS_CACHE[pattern_hash] = [x for x in hosts if x not in seen and not seen.add(x)] return HOSTS_PATTERNS_CACHE[pattern_hash][:] From 120b9a7ac6274c54d091291587b0c9ec865905a1 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Thu, 10 Dec 2015 18:03:25 -0500 Subject: [PATCH 0101/1113] Changing the way workers are forked --- bin/ansible | 1 + lib/ansible/executor/process/worker.py | 116 ++++++++------------- lib/ansible/executor/task_queue_manager.py | 31 +----- lib/ansible/plugins/strategy/__init__.py | 48 ++++----- lib/ansible/plugins/strategy/linear.py | 5 +- 5 files changed, 74 insertions(+), 127 deletions(-) diff --git a/bin/ansible b/bin/ansible index 7e1aa01a932..627510a72e8 100755 --- a/bin/ansible +++ b/bin/ansible @@ -60,6 +60,7 @@ if __name__ == '__main__': try: display = Display() + display.debug("starting run") sub = None try: diff --git a/lib/ansible/executor/process/worker.py b/lib/ansible/executor/process/worker.py index a1a83a5ddaa..73f5faa78b6 100644 --- a/lib/ansible/executor/process/worker.py +++ b/lib/ansible/executor/process/worker.py @@ -59,14 +59,18 @@ class WorkerProcess(multiprocessing.Process): for reading later. ''' - def __init__(self, tqm, main_q, rslt_q, hostvars_manager, loader): + def __init__(self, rslt_q, task_vars, host, task, play_context, loader, variable_manager, shared_loader_obj): super(WorkerProcess, self).__init__() # takes a task queue manager as the sole param: - self._main_q = main_q - self._rslt_q = rslt_q - self._hostvars = hostvars_manager - self._loader = loader + self._rslt_q = rslt_q + self._task_vars = task_vars + self._host = host + self._task = task + self._play_context = play_context + self._loader = loader + self._variable_manager = variable_manager + self._shared_loader_obj = shared_loader_obj # dupe stdin, if we have one self._new_stdin = sys.stdin @@ -97,73 +101,45 @@ class WorkerProcess(multiprocessing.Process): if HAS_ATFORK: atfork() - while True: - task = None - try: - #debug("waiting for work") - (host, task, basedir, zip_vars, compressed_vars, play_context, shared_loader_obj) = self._main_q.get(block=False) - - if compressed_vars: - job_vars = json.loads(zlib.decompress(zip_vars)) - else: - job_vars = zip_vars - - job_vars['hostvars'] = self._hostvars.hostvars() - - debug("there's work to be done! got a task/handler to work on: %s" % task) - - # because the task queue manager starts workers (forks) before the - # playbook is loaded, set the basedir of the loader inherted by - # this fork now so that we can find files correctly - self._loader.set_basedir(basedir) - - # Serializing/deserializing tasks does not preserve the loader attribute, - # since it is passed to the worker during the forking of the process and - # would be wasteful to serialize. So we set it here on the task now, and - # the task handles updating parent/child objects as needed. 
- task.set_loader(self._loader) - - # execute the task and build a TaskResult from the result - debug("running TaskExecutor() for %s/%s" % (host, task)) - executor_result = TaskExecutor( - host, - task, - job_vars, - play_context, - self._new_stdin, - self._loader, - shared_loader_obj, - ).run() - debug("done running TaskExecutor() for %s/%s" % (host, task)) - task_result = TaskResult(host, task, executor_result) - - # put the result on the result queue - debug("sending task result") - self._rslt_q.put(task_result) - debug("done sending task result") - - except queue.Empty: - time.sleep(0.0001) - except AnsibleConnectionFailure: + try: + # execute the task and build a TaskResult from the result + debug("running TaskExecutor() for %s/%s" % (self._host, self._task)) + executor_result = TaskExecutor( + self._host, + self._task, + self._task_vars, + self._play_context, + self._new_stdin, + self._loader, + self._shared_loader_obj, + ).run() + + debug("done running TaskExecutor() for %s/%s" % (self._host, self._task)) + self._host.vars = dict() + self._host.groups = [] + task_result = TaskResult(self._host, self._task, executor_result) + + # put the result on the result queue + debug("sending task result") + self._rslt_q.put(task_result) + debug("done sending task result") + + except AnsibleConnectionFailure: + self._host.vars = dict() + self._host.groups = [] + task_result = TaskResult(self._host, self._task, dict(unreachable=True)) + self._rslt_q.put(task_result, block=False) + + except Exception as e: + if not isinstance(e, (IOError, EOFError, KeyboardInterrupt)) or isinstance(e, TemplateNotFound): try: - if task: - task_result = TaskResult(host, task, dict(unreachable=True)) - self._rslt_q.put(task_result, block=False) + self._host.vars = dict() + self._host.groups = [] + task_result = TaskResult(self._host, self._task, dict(failed=True, exception=traceback.format_exc(), stdout='')) + self._rslt_q.put(task_result, block=False) except: - break - except Exception as e: - if isinstance(e, (IOError, EOFError, KeyboardInterrupt)) and not isinstance(e, TemplateNotFound): - break - else: - try: - if task: - task_result = TaskResult(host, task, dict(failed=True, exception=traceback.format_exc(), stdout='')) - self._rslt_q.put(task_result, block=False) - except: - debug("WORKER EXCEPTION: %s" % e) - debug("WORKER EXCEPTION: %s" % traceback.format_exc()) - break + debug("WORKER EXCEPTION: %s" % e) + debug("WORKER EXCEPTION: %s" % traceback.format_exc()) debug("WORKER PROCESS EXITING") - diff --git a/lib/ansible/executor/task_queue_manager.py b/lib/ansible/executor/task_queue_manager.py index e2b29a5282c..9189ab95819 100644 --- a/lib/ansible/executor/task_queue_manager.py +++ b/lib/ansible/executor/task_queue_manager.py @@ -102,11 +102,7 @@ class TaskQueueManager: for i in xrange(num): main_q = multiprocessing.Queue() rslt_q = multiprocessing.Queue() - - prc = WorkerProcess(self, main_q, rslt_q, self._hostvars_manager, self._loader) - prc.start() - - self._workers.append((prc, main_q, rslt_q)) + self._workers.append([None, main_q, rslt_q]) self._result_prc = ResultProcess(self._final_q, self._workers) self._result_prc.start() @@ -195,31 +191,12 @@ class TaskQueueManager: new_play = play.copy() new_play.post_validate(templar) - class HostVarsManager(SyncManager): - pass - - hostvars = HostVars( + self.hostvars = HostVars( inventory=self._inventory, variable_manager=self._variable_manager, loader=self._loader, ) - HostVarsManager.register( - 'hostvars', - callable=lambda: hostvars, - # FIXME: this is the 
list of exposed methods to the DictProxy object, plus our - # special ones (set_variable_manager/set_inventory). There's probably a better way - # to do this with a proper BaseProxy/DictProxy derivative - exposed=( - 'set_variable_manager', 'set_inventory', '__contains__', '__delitem__', - 'set_nonpersistent_facts', 'set_host_facts', 'set_host_variable', - '__getitem__', '__len__', '__setitem__', 'clear', 'copy', 'get', 'has_key', - 'items', 'keys', 'pop', 'popitem', 'setdefault', 'update', 'values' - ), - ) - self._hostvars_manager = HostVarsManager() - self._hostvars_manager.start() - # Fork # of forks, # of hosts or serial, whichever is lowest contenders = [self._options.forks, play.serial, len(self._inventory.get_hosts(new_play.hosts))] contenders = [ v for v in contenders if v is not None and v > 0 ] @@ -259,7 +236,6 @@ class TaskQueueManager: # and run the play using the strategy and cleanup on way out play_return = strategy.run(iterator, play_context) self._cleanup_processes() - self._hostvars_manager.shutdown() return play_return def cleanup(self): @@ -275,7 +251,8 @@ class TaskQueueManager: for (worker_prc, main_q, rslt_q) in self._workers: rslt_q.close() main_q.close() - worker_prc.terminate() + if worker_prc and worker_prc.is_alive(): + worker_prc.terminate() def clear_failed_hosts(self): self._failed_hosts = dict() diff --git a/lib/ansible/plugins/strategy/__init__.py b/lib/ansible/plugins/strategy/__init__.py index 5d31a3dba8d..ea30b800b02 100644 --- a/lib/ansible/plugins/strategy/__init__.py +++ b/lib/ansible/plugins/strategy/__init__.py @@ -31,6 +31,7 @@ from jinja2.exceptions import UndefinedError from ansible import constants as C from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable from ansible.executor.play_iterator import PlayIterator +from ansible.executor.process.worker import WorkerProcess from ansible.executor.task_result import TaskResult from ansible.inventory.host import Host from ansible.inventory.group import Group @@ -138,38 +139,29 @@ class StrategyBase: display.debug("entering _queue_task() for %s/%s" % (host, task)) + task_vars['hostvars'] = self._tqm.hostvars # and then queue the new task display.debug("%s - putting task (%s) in queue" % (host, task)) try: display.debug("worker is %d (out of %d available)" % (self._cur_worker+1, len(self._workers))) - (worker_prc, main_q, rslt_q) = self._workers[self._cur_worker] - self._cur_worker += 1 - if self._cur_worker >= len(self._workers): - self._cur_worker = 0 - # create a dummy object with plugin loaders set as an easier # way to share them with the forked processes shared_loader_obj = SharedPluginLoaderObj() - # compress (and convert) the data if so configured, which can - # help a lot when the variable dictionary is huge. 
We pop the - # hostvars out of the task variables right now, due to the fact - # that they're not JSON serializable - compressed_vars = False - if C.DEFAULT_VAR_COMPRESSION_LEVEL > 0: - zip_vars = zlib.compress(json.dumps(task_vars), C.DEFAULT_VAR_COMPRESSION_LEVEL) - compressed_vars = True - # we're done with the original dict now, so delete it to - # try and reclaim some memory space, which is helpful if the - # data contained in the dict is very large - del task_vars - else: - zip_vars = task_vars # noqa (pyflakes false positive because task_vars is deleted in the conditional above) - - # and queue the task - main_q.put((host, task, self._loader.get_basedir(), zip_vars, compressed_vars, play_context, shared_loader_obj)) + while True: + (worker_prc, main_q, rslt_q) = self._workers[self._cur_worker] + if worker_prc is None or not worker_prc.is_alive(): + worker_prc = WorkerProcess(rslt_q, task_vars, host, task, play_context, self._loader, self._variable_manager, shared_loader_obj) + self._workers[self._cur_worker][0] = worker_prc + worker_prc.start() + break + self._cur_worker += 1 + if self._cur_worker >= len(self._workers): + self._cur_worker = 0 + time.sleep(0.0001) + del task_vars self._pending_results += 1 except (EOFError, IOError, AssertionError) as e: # most likely an abort @@ -177,7 +169,7 @@ class StrategyBase: return display.debug("exiting _queue_task() for %s/%s" % (host, task)) - def _process_pending_results(self, iterator): + def _process_pending_results(self, iterator, one_pass=False): ''' Reads results off the final queue and takes appropriate action based on the result (executing callbacks, updating state, etc.). @@ -247,13 +239,11 @@ class StrategyBase: new_host_info = result_item.get('add_host', dict()) self._add_host(new_host_info, iterator) - self._tqm._hostvars_manager.hostvars().set_inventory(self._inventory) elif result[0] == 'add_group': host = result[1] result_item = result[2] self._add_group(host, result_item) - self._tqm._hostvars_manager.hostvars().set_inventory(self._inventory) elif result[0] == 'notify_handler': task_result = result[1] @@ -283,7 +273,6 @@ class StrategyBase: for target_host in host_list: self._variable_manager.set_nonpersistent_facts(target_host, {var_name: var_value}) - self._tqm._hostvars_manager.hostvars().set_nonpersistent_facts(target_host, {var_name: var_value}) elif result[0] in ('set_host_var', 'set_host_facts'): host = result[1] @@ -316,21 +305,22 @@ class StrategyBase: for target_host in host_list: self._variable_manager.set_host_variable(target_host, var_name, var_value) - self._tqm._hostvars_manager.hostvars().set_host_variable(target_host, var_name, var_value) elif result[0] == 'set_host_facts': facts = result[4] if task.action == 'set_fact': self._variable_manager.set_nonpersistent_facts(actual_host, facts) - self._tqm._hostvars_manager.hostvars().set_nonpersistent_facts(actual_host, facts) else: self._variable_manager.set_host_facts(actual_host, facts) - self._tqm._hostvars_manager.hostvars().set_host_facts(actual_host, facts) else: raise AnsibleError("unknown result message received: %s" % result[0]) + except Queue.Empty: time.sleep(0.0001) + if one_pass: + break + return ret_results def _wait_on_pending_results(self, iterator): diff --git a/lib/ansible/plugins/strategy/linear.py b/lib/ansible/plugins/strategy/linear.py index 8a8d5c084af..8c94267cf46 100644 --- a/lib/ansible/plugins/strategy/linear.py +++ b/lib/ansible/plugins/strategy/linear.py @@ -169,6 +169,7 @@ class StrategyModule(StrategyBase): skip_rest = False 
choose_step = True + results = [] for (host, task) in host_tasks: if not task: continue @@ -243,12 +244,14 @@ class StrategyModule(StrategyBase): if run_once: break + results += self._process_pending_results(iterator, one_pass=True) + # go to next host/task group if skip_rest: continue display.debug("done queuing things up, now waiting for results queue to drain") - results = self._wait_on_pending_results(iterator) + results += self._wait_on_pending_results(iterator) host_results.extend(results) if not work_to_do and len(iterator.get_failed_hosts()) > 0: From 8db291274519331ed186f0b9dc0711f6754cb25d Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Sat, 12 Dec 2015 12:59:00 -0500 Subject: [PATCH 0102/1113] corrected section anchors --- docsite/rst/developing_releases.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docsite/rst/developing_releases.rst b/docsite/rst/developing_releases.rst index 1eeb2421210..2332459c30d 100644 --- a/docsite/rst/developing_releases.rst +++ b/docsite/rst/developing_releases.rst @@ -4,7 +4,7 @@ Releases .. contents:: Topics :local: -.. schedule:: +.. _schedule: Release Schedule ```````````````` @@ -16,7 +16,7 @@ When a fix/feature gets added to the `devel` branch it will be part of the next Sometimes an RC can be extended by a few days if a bugfix makes a change that can have far reaching consequences, so users have enough time to find any new issues that may stem from this. -.. methods:: +.. _methods: Release methods ```````````````` @@ -25,7 +25,7 @@ Ansible normally goes through a 'release candidate', issuing an RC1 for a releas Otherwise fixes will be applied and an RC2 will be provided for testing and if no bugs after 2 days, the final release will be made, iterating this last step and incrementing the candidate number as we find major bugs. -.. freezing:: +.. _freezing: Release feature freeze `````````````````````` From 0a112a1b0617d4087ae3e46ea031101af204d48e Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Sat, 12 Dec 2015 13:14:14 -0500 Subject: [PATCH 0103/1113] fixed formating issues with rst --- docsite/rst/porting_guide_2.0.rst | 44 +++++++++++++++---------------- 1 file changed, 21 insertions(+), 23 deletions(-) diff --git a/docsite/rst/porting_guide_2.0.rst b/docsite/rst/porting_guide_2.0.rst index 9c26a4b1611..8d69ecd4403 100644 --- a/docsite/rst/porting_guide_2.0.rst +++ b/docsite/rst/porting_guide_2.0.rst @@ -56,12 +56,11 @@ uses key=value escaping which has not changed. The other option is to check for "msg": "Testing some things" * porting task includes - * More dynamic. Corner-case formats that were not supposed to work now do not, as expected. - * variables defined in the yaml dict format https://github.com/ansible/ansible/issues/13324 - * variable precedence +* More dynamic. Corner-case formats that were not supposed to work now do not, as expected. +* variables defined in the yaml dict format https://github.com/ansible/ansible/issues/13324 * templating (variables in playbooks and template lookups) has improved with regard to keeping the original instead of turning everything into a string. - If you need the old behavior, quote the value to pass it around as a string. - Empty variables and variables set to null in yaml are no longer converted to empty strings. They will retain the value of `None`. + If you need the old behavior, quote the value to pass it around as a string. +* Empty variables and variables set to null in yaml are no longer converted to empty strings. They will retain the value of `None`. 
You can override the `null_representation` setting to an empty string in your config file by setting the `ANSIBLE_NULL_REPRESENTATION` environment variable. * Extras callbacks must be whitelisted in ansible.cfg. Copying is no longer necessary but whitelisting in ansible.cfg must be completed. * dnf module has been rewritten. Some minor changes in behavior may be observed. @@ -72,26 +71,26 @@ Deprecated While all items listed here will show a deprecation warning message, they still work as they did in 1.9.x. Please note that they will be removed in 2.2 (Ansible always waits two major releases to remove a deprecated feature). -* Bare variables in with_ loops should instead use the “{{var}}” syntax, which helps eliminate ambiguity. +* Bare variables in `with_` loops should instead use the “{{var}}” syntax, which helps eliminate ambiguity. * The ansible-galaxy text format requirements file. Users should use the YAML format for requirements instead. -* Undefined variables within a with_ loop’s list currently do not interrupt the loop, but they do issue a warning; in the future, they will issue an error. +* Undefined variables within a `with_` loop’s list currently do not interrupt the loop, but they do issue a warning; in the future, they will issue an error. * Using variables for task parameters is unsafe and will be removed in a future version. For example:: - hosts: localhost - gather_facts: no - vars: - debug_params: - msg: "hello there" - tasks: - - debug: "{{debug_params}}" + gather_facts: no + vars: + debug_params: + msg: "hello there" + tasks: + - debug: "{{debug_params}}" * Host patterns should use a comma (,) or colon (:) instead of a semicolon (;) to separate hosts/groups in the pattern. * Ranges specified in host patterns should use the [x:y] syntax, instead of [x-y]. * Playbooks using privilege escalation should always use “become*” options rather than the old su*/sudo* options. -* The “short form” for vars_prompt is no longer supported. -For example:: +* The “short form” for vars_prompt is no longer supported. + For example:: -vars_prompt: + vars_prompt: variable_name: "Prompt string" * Specifying variables at the top level of a task include statement is no longer supported. For example:: @@ -101,21 +100,21 @@ vars_prompt: Should now be:: -- include: foo.yml - args: - a: 1 + - include: foo.yml + args: + a: 1 * Setting any_errors_fatal on a task is no longer supported. This should be set at the play level only. * Bare variables in the `environment` dictionary (for plays/tasks/etc.) are no longer supported. Variables specified there should use the full variable syntax: ‘{{foo}}’. * Tags should no longer be specified with other parameters in a task include. Instead, they should be specified as an option on the task. -For example:: + For example:: - include: foo.yml tags=a,b,c -Should be:: + Should be:: - include: foo.yml - tags: [a, b, c] + tags: [a, b, c] * The first_available_file option on tasks has been deprecated. Users should use the with_first_found option or lookup (‘first_found’, …) plugin. @@ -125,7 +124,6 @@ Porting plugins In ansible-1.9.x, you would generally copy an existing plugin to create a new one. Simply implementing the methods and attributes that the caller of the plugin expected made it a plugin of that type. In ansible-2.0, most plugins are implemented by subclassing a base class for each plugin type. This way the custom plugin does not need to contain methods which are not customized. -.. 
note:: Lookup plugins -------------- From d7b516f75dc879ad350b285e7ddc398418bf85fd Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Sat, 12 Dec 2015 13:16:40 -0500 Subject: [PATCH 0104/1113] added releases doc --- docsite/rst/developing.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docsite/rst/developing.rst b/docsite/rst/developing.rst index 2a258993019..c5a1dca0611 100644 --- a/docsite/rst/developing.rst +++ b/docsite/rst/developing.rst @@ -11,6 +11,7 @@ Learn how to build modules of your own in any language, and also how to extend A developing_modules developing_plugins developing_test_pr + developing_releases Developers will also likely be interested in the fully-discoverable in :doc:`tower`. It's great for embedding Ansible in all manner of applications. From 8e445c551a23f52e901c9b1d2603e496a2e88c11 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Sat, 12 Dec 2015 13:43:10 -0500 Subject: [PATCH 0105/1113] removed unused imports in galaxy/cli --- lib/ansible/cli/galaxy.py | 35 ++++++++++++++++------------------- 1 file changed, 16 insertions(+), 19 deletions(-) diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py index 01e0475b24b..0f9074da935 100644 --- a/lib/ansible/cli/galaxy.py +++ b/lib/ansible/cli/galaxy.py @@ -25,7 +25,6 @@ __metaclass__ = type import os.path import sys import yaml -import json import time from collections import defaultdict @@ -40,7 +39,6 @@ from ansible.galaxy.role import GalaxyRole from ansible.galaxy.login import GalaxyLogin from ansible.galaxy.token import GalaxyToken from ansible.playbook.role.requirement import RoleRequirement -from ansible.module_utils.urls import open_url try: from __main__ import display @@ -61,10 +59,10 @@ class GalaxyCLI(CLI): "remove": "delete a role from your roles path", "search": "query the Galaxy API", "setup": "add a TravisCI integration to Galaxy", - } + } SKIP_INFO_KEYS = ("name", "description", "readme_html", "related", "summary_fields", "average_aw_composite", "average_aw_score", "url" ) - + def __init__(self, args): self.VALID_ACTIONS = self.available_commands.keys() self.VALID_ACTIONS.sort() @@ -101,7 +99,7 @@ class GalaxyCLI(CLI): usage = "usage: %%prog [%s] [--help] [options] ..." 
% "|".join(self.VALID_ACTIONS), epilog = "\nSee '%s --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0]) ) - + self.set_action() # options specific to actions @@ -131,7 +129,7 @@ class GalaxyCLI(CLI): self.parser.add_option('-n', '--no-deps', dest='no_deps', action='store_true', default=False, help='Don\'t download roles listed as dependencies') self.parser.add_option('-r', '--role-file', dest='role_file', - help='A file containing a list of roles to be imported') + help='A file containing a list of roles to be imported') elif self.action == "remove": self.parser.set_usage("usage: %prog remove role1 role2 ...") elif self.action == "list": @@ -190,7 +188,7 @@ class GalaxyCLI(CLI): # if not offline, get connect to galaxy api if self.action in ("import","info","install","search","login","setup","delete") or \ - (self.action == 'init' and not self.options.offline): + (self.action == 'init' and not self.options.offline): self.api = GalaxyAPI(self.galaxy) self.execute() @@ -544,7 +542,7 @@ class GalaxyCLI(CLI): def execute_search(self): page_size = 1000 search = None - + if len(self.args): terms = [] for i in range(len(self.args)): @@ -556,7 +554,7 @@ class GalaxyCLI(CLI): response = self.api.search_roles(search, platforms=self.options.platforms, tags=self.options.tags, author=self.options.author, page_size=page_size) - + if response['count'] == 0: display.display("No roles match your search.", color="yellow") return True @@ -578,7 +576,7 @@ class GalaxyCLI(CLI): data += (format_str % ("----", "-----------")) for role in response['results']: data += (format_str % (role['username'] + '.' + role['name'],role['description'])) - + self.pager(data) return True @@ -595,12 +593,12 @@ class GalaxyCLI(CLI): github_token = self.options.token galaxy_response = self.api.authenticate(github_token) - + if self.options.token is None: # Remove the token we created login.remove_github_token() - - # Store the Galaxy token + + # Store the Galaxy token token = GalaxyToken() token.set(galaxy_response['token']) @@ -611,7 +609,7 @@ class GalaxyCLI(CLI): """ Import a role into Galaxy """ - + colors = { 'INFO': 'normal', 'WARNING': 'yellow', @@ -631,7 +629,7 @@ class GalaxyCLI(CLI): else: # Submit an import request task = self.api.create_import_task(github_user, github_repo, reference=self.options.reference) - + if len(task) > 1: # found multiple roles associated with github_user/github_repo display.display("WARNING: More than one Galaxy role associated with Github repo %s/%s." % (github_user,github_repo), @@ -693,7 +691,7 @@ class GalaxyCLI(CLI): if len(self.args) < 4: raise AnsibleError("Missing one or more arguments. Expecting: source github_user github_repo secret") return 0 - + secret = self.args.pop() github_repo = self.args.pop() github_user = self.args.pop() @@ -711,7 +709,7 @@ class GalaxyCLI(CLI): if len(self.args) < 2: raise AnsibleError("Missing one or more arguments. 
Expected: github_user github_repo") - + github_repo = self.args.pop() github_user = self.args.pop() resp = self.api.delete_role(github_user, github_repo) @@ -722,9 +720,8 @@ class GalaxyCLI(CLI): display.display("------ --------------- ----------") for role in resp['deleted_roles']: display.display("%-8s %-15s %s" % (role.id,role.namespace,role.name)) - + display.display(resp['status']) return True - From 3c4d2fc6f2cdeba074511fb591134014cf77032d Mon Sep 17 00:00:00 2001 From: Michael Scherer Date: Sat, 12 Dec 2015 19:31:19 +0100 Subject: [PATCH 0106/1113] Add tests for ansible.module_utils.known_hosts --- .../module_utils/basic/test_known_hosts.py | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 test/units/module_utils/basic/test_known_hosts.py diff --git a/test/units/module_utils/basic/test_known_hosts.py b/test/units/module_utils/basic/test_known_hosts.py new file mode 100644 index 00000000000..952184bfec9 --- /dev/null +++ b/test/units/module_utils/basic/test_known_hosts.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# (c) 2015, Michael Scherer +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see . + +from ansible.compat.tests import unittest +from ansible.module_utils import known_hosts + +class TestAnsibleModuleKnownHosts(unittest.TestCase): + urls = { + 'ssh://one.example.org/example.git': + {'is_ssh_url': True, 'get_fqdn': 'one.example.org'}, + 'ssh+git://two.example.org/example.git': + {'is_ssh_url': True, 'get_fqdn': 'two.example.org'}, + 'rsync://three.example.org/user/example.git': + {'is_ssh_url': False, 'get_fqdn': 'three.example.org'}, + 'git@four.example.org:user/example.git': + {'is_ssh_url': True, 'get_fqdn': 'four.example.org'}, + 'git+ssh://five.example.org/example.git': + {'is_ssh_url': True, 'get_fqdn': 'five.example.org'}, + 'ssh://six.example.org:21/example.org': + {'is_ssh_url': True, 'get_fqdn': 'six.example.org'}, + } + + def test_is_ssh_url(self): + for u in self.urls: + self.assertEqual(known_hosts.is_ssh_url(u), self.urls[u]['is_ssh_url']) + + def test_get_fqdn(self): + for u in self.urls: + self.assertEqual(known_hosts.get_fqdn(u), self.urls[u]['get_fqdn']) + + + From 99e46440bdaf622958f78cebecb52dec7ed67669 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Sat, 12 Dec 2015 16:10:18 -0500 Subject: [PATCH 0107/1113] changed shell delimiters for csh fixes #13459 --- lib/ansible/plugins/shell/csh.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/ansible/plugins/shell/csh.py b/lib/ansible/plugins/shell/csh.py index 1c383d133c6..bd210f12feb 100644 --- a/lib/ansible/plugins/shell/csh.py +++ b/lib/ansible/plugins/shell/csh.py @@ -24,6 +24,8 @@ class ShellModule(ShModule): # How to end lines in a python script one-liner _SHELL_EMBEDDED_PY_EOL = '\\\n' _SHELL_REDIRECT_ALLNULL = '>& /dev/null' + _SHELL_SUB_LEFT = '"`' + _SHELL_SUB_RIGHT = '`"' def env_prefix(self, **kwargs): return 'env %s' % super(ShellModule, self).env_prefix(**kwargs) From 
f3bedbae2991b540421d64f5be942ec7c84fdf7d Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Sat, 12 Dec 2015 17:50:55 -0500 Subject: [PATCH 0108/1113] simplified skippy thanks agaffney! --- lib/ansible/plugins/callback/skippy.py | 159 +------------------------ 1 file changed, 6 insertions(+), 153 deletions(-) diff --git a/lib/ansible/plugins/callback/skippy.py b/lib/ansible/plugins/callback/skippy.py index 495943417fd..306d1a534e5 100644 --- a/lib/ansible/plugins/callback/skippy.py +++ b/lib/ansible/plugins/callback/skippy.py @@ -19,10 +19,9 @@ from __future__ import (absolute_import, division, print_function) __metaclass__ = type -from ansible import constants as C -from ansible.plugins.callback import CallbackBase +from ansible.plugins.callback.default import CallbackModule as CallbackModule_default -class CallbackModule(CallbackBase): +class CallbackModule(CallbackModule_default): ''' This is the default callback interface, which simply prints messages @@ -33,154 +32,8 @@ class CallbackModule(CallbackBase): CALLBACK_TYPE = 'stdout' CALLBACK_NAME = 'skippy' - def v2_runner_on_failed(self, result, ignore_errors=False): - delegated_vars = result._result.get('_ansible_delegated_vars', None) - if 'exception' in result._result: - if self._display.verbosity < 3: - # extract just the actual error message from the exception text - error = result._result['exception'].strip().split('\n')[-1] - msg = "An exception occurred during task execution. To see the full traceback, use -vvv. The error was: %s" % error - else: - msg = "An exception occurred during task execution. The full traceback is:\n" + result._result['exception'] - - self._display.display(msg, color='red') - - # finally, remove the exception from the result so it's not shown every time - del result._result['exception'] - - if result._task.loop and 'results' in result._result: - self._process_items(result) - else: - if delegated_vars: - self._display.display("fatal: [%s -> %s]: FAILED! => %s" % (result._host.get_name(), delegated_vars['ansible_host'], self._dump_results(result._result)), color='red') - else: - self._display.display("fatal: [%s]: FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result)), color='red') - - if result._task.ignore_errors: - self._display.display("...ignoring", color='cyan') - - def v2_runner_on_ok(self, result): - - delegated_vars = result._result.get('_ansible_delegated_vars', None) - if result._task.action == 'include': - return - elif result._result.get('changed', False): - if delegated_vars: - msg = "changed: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host']) - else: - msg = "changed: [%s]" % result._host.get_name() - color = 'yellow' - else: - if delegated_vars: - msg = "ok: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host']) - else: - msg = "ok: [%s]" % result._host.get_name() - color = 'green' - - if result._task.loop and 'results' in result._result: - self._process_items(result) - else: - - if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result: - msg += " => %s" % (self._dump_results(result._result),) - self._display.display(msg, color=color) - - self._handle_warnings(result._result) - - def v2_runner_on_unreachable(self, result): - delegated_vars = result._result.get('_ansible_delegated_vars', None) - if delegated_vars: - self._display.display("fatal: [%s -> %s]: UNREACHABLE! 
=> %s" % (result._host.get_name(), delegated_vars['ansible_host'], self._dump_results(result._result)), color='red') - else: - self._display.display("fatal: [%s]: UNREACHABLE! => %s" % (result._host.get_name(), self._dump_results(result._result)), color='red') - - def v2_playbook_on_no_hosts_matched(self): - self._display.display("skipping: no hosts matched", color='cyan') - - def v2_playbook_on_no_hosts_remaining(self): - self._display.banner("NO MORE HOSTS LEFT") - - def v2_playbook_on_task_start(self, task, is_conditional): - self._display.banner("TASK [%s]" % task.get_name().strip()) - if self._display.verbosity > 2: - path = task.get_path() - if path: - self._display.display("task path: %s" % path, color='dark gray') - - def v2_playbook_on_cleanup_task_start(self, task): - self._display.banner("CLEANUP TASK [%s]" % task.get_name().strip()) - - def v2_playbook_on_handler_task_start(self, task): - self._display.banner("RUNNING HANDLER [%s]" % task.get_name().strip()) - - def v2_playbook_on_play_start(self, play): - name = play.get_name().strip() - if not name: - msg = "PLAY" - else: - msg = "PLAY [%s]" % name - - self._display.banner(msg) - - def v2_on_file_diff(self, result): - if result._task.loop and 'results' in result._result: - for res in result._result['results']: - newres = self._copy_result(result) - res['item'] = self._get_item(res) - newres._result = res - - self.v2_on_file_diff(newres) - elif 'diff' in result._result and result._result['diff']: - self._display.display(self._get_diff(result._result['diff'])) - - def v2_playbook_item_on_ok(self, result): - - delegated_vars = result._result.get('_ansible_delegated_vars', None) - if result._task.action == 'include': - return - elif result._result.get('changed', False): - if delegated_vars: - msg = "changed: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host']) - else: - msg = "changed: [%s]" % result._host.get_name() - color = 'yellow' - else: - if delegated_vars: - msg = "ok: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host']) - else: - msg = "ok: [%s]" % result._host.get_name() - color = 'green' - - msg += " => (item=%s)" % (result._result['item'],) - - if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result: - msg += " => %s" % self._dump_results(result._result) - self._display.display(msg, color=color) - - def v2_playbook_item_on_failed(self, result): - delegated_vars = result._result.get('_ansible_delegated_vars', None) - if 'exception' in result._result: - if self._display.verbosity < 3: - # extract just the actual error message from the exception text - error = result._result['exception'].strip().split('\n')[-1] - msg = "An exception occurred during task execution. To see the full traceback, use -vvv. The error was: %s" % error - else: - msg = "An exception occurred during task execution. 
The full traceback is:\n" + result._result['exception'] - - self._display.display(msg, color='red') - - # finally, remove the exception from the result so it's not shown every time - del result._result['exception'] - - if delegated_vars: - self._display.display("failed: [%s -> %s] => (item=%s) => %s" % (result._host.get_name(), delegated_vars['ansible_host'], result._result['item'], self._dump_results(result._result)), color='red') - else: - self._display.display("failed: [%s] => (item=%s) => %s" % (result._host.get_name(), result._result['item'], self._dump_results(result._result)), color='red') - - self._handle_warnings(result._result) - - def v2_playbook_on_include(self, included_file): - msg = 'included: %s for %s' % (included_file._filename, ", ".join([h.name for h in included_file._hosts])) - color = 'cyan' - self._display.display(msg, color='cyan') + def v2_runner_on_skipped(self, result): + pass + def v2_playbook_item_on_skipped(self, result): + pass From d73562902b289e7fd7e2e5a37e82b00c83a16369 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Sun, 13 Dec 2015 00:13:13 -0500 Subject: [PATCH 0109/1113] debug now validates its params simplified var handling made default message the same as in pre 2.0 fixes #13532 --- lib/ansible/plugins/action/debug.py | 35 ++++++++++++++++------------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/lib/ansible/plugins/action/debug.py b/lib/ansible/plugins/action/debug.py index a0ffb714044..2af20eddfc4 100644 --- a/lib/ansible/plugins/action/debug.py +++ b/lib/ansible/plugins/action/debug.py @@ -20,40 +20,45 @@ __metaclass__ = type from ansible.plugins.action import ActionBase from ansible.utils.boolean import boolean from ansible.utils.unicode import to_unicode +from ansible.errors import AnsibleUndefinedVariable class ActionModule(ActionBase): ''' Print statements during execution ''' TRANSFERS_FILES = False + VALID_ARGS = set(['msg', 'var']) def run(self, tmp=None, task_vars=None): if task_vars is None: task_vars = dict() + for arg in self._task.args: + if arg not in self.VALID_ARGS: + return {"failed": True, "msg": "'%s' is not a valid option in debug" % arg} + + if 'msg' in self._task.args and 'var' in self._task.args: + return {"failed": True, "msg": "'msg' and 'var' are incompatible options"} + result = super(ActionModule, self).run(tmp, task_vars) if 'msg' in self._task.args: - if 'fail' in self._task.args and boolean(self._task.args['fail']): - result['failed'] = True - result['msg'] = self._task.args['msg'] - else: - result['msg'] = self._task.args['msg'] - # FIXME: move the LOOKUP_REGEX somewhere else - elif 'var' in self._task.args: # and not utils.LOOKUP_REGEX.search(self._task.args['var']): - results = self._templar.template(self._task.args['var'], convert_bare=True, fail_on_undefined=False) + result['msg'] = self._task.args['msg'] + + elif 'var' in self._task.args: + try: + results = self._templar.template(self._task.args['var'], convert_bare=True, fail_on_undefined=True) + if results == self._task.args['var']: + raise AnsibleUndefinedVariable + except AnsibleUndefinedVariable: + results = "VARIABLE IS NOT DEFINED!" 
+
             if type(self._task.args['var']) in (list, dict):
                 # If var is a list or dict, use the type as key to display
                 result[to_unicode(type(self._task.args['var']))] = results
             else:
-                # If var name is same as result, try to template it
-                if results == self._task.args['var']:
-                    try:
-                        results = self._templar.template("{{" + results + "}}", convert_bare=True, fail_on_undefined=True)
-                    except:
-                        results = "VARIABLE IS NOT DEFINED!"
                 result[self._task.args['var']] = results
         else:
-            result['msg'] = 'here we are'
+            result['msg'] = 'Hello world!'

         # force flag to make debug output module always verbose
         result['_ansible_verbose_always'] = True

From e2ad4fe9100729462fbd511c75a035ccdfd41841 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Sun, 13 Dec 2015 00:34:23 -0500
Subject: [PATCH 0110/1113] include all packaging in tarball not just rpm spec file

---
 MANIFEST.in | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/MANIFEST.in b/MANIFEST.in
index d8402f0297f..64c5bf1fcba 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -4,12 +4,13 @@ prune ticket_stubs
 prune packaging
 prune test
 prune hacking
-include README.md packaging/rpm/ansible.spec COPYING
+include README.md COPYING
 include examples/hosts
 include examples/ansible.cfg
 include lib/ansible/module_utils/powershell.ps1
 recursive-include lib/ansible/modules *
 recursive-include docs *
+recursive-include packaging *
 include Makefile
 include VERSION
 include MANIFEST.in

From 4779f29777872f1352c65ea504eb81e998a47b7b Mon Sep 17 00:00:00 2001
From: Usman Ehtesham Gul
Date: Sun, 13 Dec 2015 01:24:27 -0500
Subject: [PATCH 0111/1113] Fix Doc mistake

Fix Doc mistake in ansible/docsite/rst/playbooks_variables.rst
---
 docsite/rst/playbooks_variables.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docsite/rst/playbooks_variables.rst b/docsite/rst/playbooks_variables.rst
index 307387a72e5..122c0ef9232 100644
--- a/docsite/rst/playbooks_variables.rst
+++ b/docsite/rst/playbooks_variables.rst
@@ -796,7 +796,7 @@ Basically, anything that goes into "role defaults" (the defaults folder inside t
 .. [1] Tasks in each role will see their own role's defaults. Tasks defined outside of a role will see the last role's defaults.
 .. [2] Variables defined in inventory file or provided by dynamic inventory.

-.. note:: Within a any section, redefining a var will overwrite the previous instance.
+.. note:: Within any section, redefining a var will overwrite the previous instance.
           If multiple groups have the same variable, the last one loaded wins.
           If you define a variable twice in a play's vars: section, the 2nd one wins.
 .. note:: the previous describes the default config `hash_behavior=replace`, switch to 'merge' to only partially overwrite.

From 1b2ebe8defddbb6f6cd471f999d6eba8b78f1446 Mon Sep 17 00:00:00 2001
From: Robin Roth
Date: Sun, 13 Dec 2015 10:56:47 +0100
Subject: [PATCH 0112/1113] make shallow clone the default for ansible-pull

---
 lib/ansible/cli/pull.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/lib/ansible/cli/pull.py b/lib/ansible/cli/pull.py
index 67e89259303..7b2fd13e5ee 100644
--- a/lib/ansible/cli/pull.py
+++ b/lib/ansible/cli/pull.py
@@ -80,8 +80,8 @@ class PullCLI(CLI):
             help='directory to checkout repository to')
         self.parser.add_option('-U', '--url', dest='url', default=None,
             help='URL of the playbook repository')
-        self.parser.add_option('--depth', dest='depth', default=None,
-            help='Depth of checkout, shallow checkout if greater or equal 1 .
Defaults to full checkout.') + self.parser.add_option('--full', dest='fullclone', action='store_true', + help='Do a full clone, instead of a shallow one.') self.parser.add_option('-C', '--checkout', dest='checkout', help='branch/tag/commit to checkout. ' 'Defaults to behavior of repository module.') self.parser.add_option('--accept-host-key', default=False, dest='accept_host_key', action='store_true', @@ -157,8 +157,8 @@ class PullCLI(CLI): if self.options.verify: repo_opts += ' verify_commit=yes' - if self.options.depth: - repo_opts += ' depth=%s' % self.options.depth + if not self.options.fullclone: + repo_opts += ' depth=1' path = module_loader.find_plugin(self.options.module_name) From 1bd8d97093f30e4848640a5c43a7f830a9112e2f Mon Sep 17 00:00:00 2001 From: Robin Roth Date: Sun, 13 Dec 2015 11:19:50 +0100 Subject: [PATCH 0113/1113] fix whitespace --- lib/ansible/cli/pull.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/cli/pull.py b/lib/ansible/cli/pull.py index 7b2fd13e5ee..2571717766e 100644 --- a/lib/ansible/cli/pull.py +++ b/lib/ansible/cli/pull.py @@ -156,7 +156,7 @@ class PullCLI(CLI): if self.options.verify: repo_opts += ' verify_commit=yes' - + if not self.options.fullclone: repo_opts += ' depth=1' From d8e6bc98a2494628aca2fc406655dce70701f525 Mon Sep 17 00:00:00 2001 From: chouseknecht Date: Wed, 9 Dec 2015 17:09:34 -0500 Subject: [PATCH 0114/1113] Fix overloaded options. Show an error when no action given. Don't show a helpful list of commands and descriptions. --- lib/ansible/cli/galaxy.py | 68 ++++++++------------------------------- 1 file changed, 13 insertions(+), 55 deletions(-) diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py index 0f9074da935..13df7c41220 100644 --- a/lib/ansible/cli/galaxy.py +++ b/lib/ansible/cli/galaxy.py @@ -48,50 +48,14 @@ except ImportError: class GalaxyCLI(CLI): - available_commands = { - "delete": "remove a role from Galaxy", - "import": "add a role contained in a GitHub repo to Galaxy", - "info": "display details about a particular role", - "init": "create a role directory structure in your roles path", - "install": "download a role into your roles path", - "list": "enumerate roles found in your roles path", - "login": "authenticate with Galaxy API and store the token", - "remove": "delete a role from your roles path", - "search": "query the Galaxy API", - "setup": "add a TravisCI integration to Galaxy", - } - SKIP_INFO_KEYS = ("name", "description", "readme_html", "related", "summary_fields", "average_aw_composite", "average_aw_score", "url" ) - + VALID_ACTIONS = ("delete","import","info","init","install","list","login","remove","search","setup") + def __init__(self, args): - self.VALID_ACTIONS = self.available_commands.keys() - self.VALID_ACTIONS.sort() self.api = None self.galaxy = None super(GalaxyCLI, self).__init__(args) - def set_action(self): - """ - Get the action the user wants to execute from the sys argv list. 
- """ - for i in range(0,len(self.args)): - arg = self.args[i] - if arg in self.VALID_ACTIONS: - self.action = arg - del self.args[i] - break - - if not self.action: - self.show_available_actions() - - def show_available_actions(self): - # list available commands - display.display(u'\n' + "usage: ansible-galaxy COMMAND [--help] [options] ...") - display.display(u'\n' + "availabe commands:" + u'\n\n') - for key in self.VALID_ACTIONS: - display.display(u'\t' + "%-12s %s" % (key, self.available_commands[key])) - display.display(' ') - def parse(self): ''' create an options parser for bin/ansible ''' @@ -107,11 +71,11 @@ class GalaxyCLI(CLI): self.parser.set_usage("usage: %prog delete [options] github_user github_repo") elif self.action == "import": self.parser.set_usage("usage: %prog import [options] github_user github_repo") - self.parser.add_option('-n', '--no-wait', dest='wait', action='store_false', default=True, + self.parser.add_option('--no-wait', dest='wait', action='store_false', default=True, help='Don\'t wait for import results.') - self.parser.add_option('-b', '--branch', dest='reference', + self.parser.add_option('--branch', dest='reference', help='The name of a branch to import. Defaults to the repository\'s default branch (usually master)') - self.parser.add_option('-t', '--status', dest='check_status', action='store_true', default=False, + self.parser.add_option('--status', dest='check_status', action='store_true', default=False, help='Check the status of the most recent import request for given github_user/github_repo.') elif self.action == "info": self.parser.set_usage("usage: %prog info [options] role_name[,version]") @@ -147,15 +111,14 @@ class GalaxyCLI(CLI): help='GitHub username') self.parser.set_usage("usage: %prog search [searchterm1 searchterm2] [--galaxy-tags galaxy_tag1,galaxy_tag2] [--platforms platform1,platform2] [--author username]") elif self.action == "setup": - self.parser.set_usage("usage: %prog setup [options] source github_user github_repo secret" + - u'\n\n' + "Create an integration with travis.") - self.parser.add_option('-r', '--remove', dest='remove_id', default=None, + self.parser.set_usage("usage: %prog setup [options] source github_user github_repo secret") + self.parser.add_option('--remove', dest='remove_id', default=None, help='Remove the integration matching the provided ID value. Use --list to see ID values.') - self.parser.add_option('-l', '--list', dest="setup_list", action='store_true', default=False, + self.parser.add_option('--list', dest="setup_list", action='store_true', default=False, help='List all of your integrations.') # options that apply to more than one action - if not self.action in ("config","import","init","login","setup"): + if not self.action in ("import","init","login","setup"): self.parser.add_option('-p', '--roles-path', dest='roles_path', default=C.DEFAULT_ROLES_PATH, help='The path to the directory containing your roles. 
' 'The default is the roles_path configured in your ' @@ -171,19 +134,14 @@ class GalaxyCLI(CLI): self.parser.add_option('-f', '--force', dest='force', action='store_true', default=False, help='Force overwriting an existing role') - if self.action: - # get options, args and galaxy object - self.options, self.args =self.parser.parse_args() - display.verbosity = self.options.verbosity - self.galaxy = Galaxy(self.options) + self.options, self.args =self.parser.parse_args() + display.verbosity = self.options.verbosity + self.galaxy = Galaxy(self.options) return True def run(self): - - if not self.action: - return True - + super(GalaxyCLI, self).run() # if not offline, get connect to galaxy api From 989604b1a3977e6246f997d1a75aaf97776b28ae Mon Sep 17 00:00:00 2001 From: chouseknecht Date: Wed, 9 Dec 2015 17:12:53 -0500 Subject: [PATCH 0115/1113] Fix typo. --- docsite/rst/galaxy.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docsite/rst/galaxy.rst b/docsite/rst/galaxy.rst index 783ac15e456..c9dea273367 100644 --- a/docsite/rst/galaxy.rst +++ b/docsite/rst/galaxy.rst @@ -126,7 +126,7 @@ The above will create the following directory structure in the current working d :: README.md - .travsis.yml + .travis.yml defaults/ main.yml files/ From bc7392009069749042bf937eb315ea19c513d0ff Mon Sep 17 00:00:00 2001 From: chouseknecht Date: Wed, 9 Dec 2015 18:28:57 -0500 Subject: [PATCH 0116/1113] Updated ansible-galaxy man page. Removed -b option for import. --- docs/man/man1/ansible-galaxy.1.asciidoc.in | 202 ++++++++++++++++++++- lib/ansible/cli/galaxy.py | 4 +- 2 files changed, 201 insertions(+), 5 deletions(-) diff --git a/docs/man/man1/ansible-galaxy.1.asciidoc.in b/docs/man/man1/ansible-galaxy.1.asciidoc.in index e6f2d0b4568..44f0b46b085 100644 --- a/docs/man/man1/ansible-galaxy.1.asciidoc.in +++ b/docs/man/man1/ansible-galaxy.1.asciidoc.in @@ -12,7 +12,7 @@ ansible-galaxy - manage roles using galaxy.ansible.com SYNOPSIS -------- -ansible-galaxy [init|info|install|list|remove] [--help] [options] ... +ansible-galaxy [delete|import|info|init|install|list|login|remove|search|setup] [--help] [options] ... DESCRIPTION @@ -20,7 +20,7 @@ DESCRIPTION *Ansible Galaxy* is a shared repository for Ansible roles. The ansible-galaxy command can be used to manage these roles, -or by creating a skeleton framework for roles you'd like to upload to Galaxy. +or for creating a skeleton framework for roles you'd like to upload to Galaxy. COMMON OPTIONS -------------- @@ -29,7 +29,6 @@ COMMON OPTIONS Show a help message related to the given sub-command. - INSTALL ------- @@ -145,6 +144,203 @@ The path to the directory containing your roles. The default is the *roles_path* configured in your *ansible.cfg* file (/etc/ansible/roles if not configured) +SEARCH +------ + +The *search* sub-command returns a filtered list of roles found at +galaxy.ansible.com. + +USAGE +~~~~~ + +$ ansible-galaxy search [options] [searchterm1 searchterm2] + + +OPTIONS +~~~~~~~ +*--galaxy-tags*:: + +Provide a comma separated list of Galaxy Tags on which to filter. + +*--platforms*:: + +Provide a comma separated list of Platforms on which to filter. + +*--author*:: + +Specify the username of a Galaxy contributor on which to filter. + +*-c*, *--ingore-certs*:: + +Ignore TLS certificate errors. + +*-s*, *--server*:: + +Override the default server https://galaxy.ansible.com. + + +INFO +---- + +The *info* sub-command shows detailed information for a specific role. 
+Details returned about the role included information from the local copy +as well as information from galaxy.ansible.com. + +USAGE +~~~~~ + +$ ansible-galaxy info [options] role_name[, version] + +OPTIONS +~~~~~~~ + +*-p* 'ROLES_PATH', *--roles-path=*'ROLES_PATH':: + +The path to the directory containing your roles. The default is the *roles_path* +configured in your *ansible.cfg* file (/etc/ansible/roles if not configured) + +*-c*, *--ingore-certs*:: + +Ignore TLS certificate errors. + +*-s*, *--server*:: + +Override the default server https://galaxy.ansible.com. + + +LOGIN +----- + +The *login* sub-command is used to authenticate with galaxy.ansible.com. +Authentication is required to use the import, delete and setup commands. +It will authenticate the user,retrieve a token from Galaxy, and store it +in the user's home directory. + +USAGE +~~~~~ + +$ ansible-galaxy login [options] + +The *login* sub-command prompts for a *GitHub* username and password. It does +NOT send your password to Galaxy. It actually authenticates with GitHub and +creates a personal access token. It then sends the personal access token to +Galaxy, which in turn verifies that you are you and returns a Galaxy access +token. After authentication completes the *GitHub* personal access token is +destroyed. + +If you do not wish to use your GitHub password, or if you have two-factor +authentication enabled with GitHub, use the *--github-token* option to pass a +personal access token that you create. Log into GitHub, go to Settings and +click on Personal Access Token to create a token. + +OPTIONS +~~~~~~~ + +*-c*, *--ingore-certs*:: + +Ignore TLS certificate errors. + +*-s*, *--server*:: + +Override the default server https://galaxy.ansible.com. + +*--github-token*:: + +Authenticate using a *GitHub* personal access token rather than a password. + + +IMPORT +------ + +Import a role from *GitHub* to galaxy.ansible.com. Requires the user first +authenticate with galaxy.ansible.com using the *login* subcommand. + +USAGE +~~~~~ + +$ ansible-galaxy import [options] github_user github_repo + +OPTIONS +~~~~~~~ +*-c*, *--ingore-certs*:: + +Ignore TLS certificate errors. + +*-s*, *--server*:: + +Override the default server https://galaxy.ansible.com. + +*--branch*:: + +Provide a specific branch to import. When a branch is not specified the +branch found in meta/main.yml is used. If no branch is specified in +meta/main.yml, the repo's default branch (usually master) is used. + + +DELETE +------ + +The *delete* sub-command will delete a role from galaxy.ansible.com. Requires +the user first authenticate with galaxy.ansible.com using the *login* subcommand. + +USAGE +~~~~~ + +$ ansible-galaxy delete [options] github_user github_repo + +OPTIONS +~~~~~~~ + +*-c*, *--ingore-certs*:: + +Ignore TLS certificate errors. + +*-s*, *--server*:: + +Override the default server https://galaxy.ansible.com. + + +SETUP +----- + +The *setup* sub-command creates an integration point for *Travis CI*, enabling +galaxy.ansible.com to receive notifications from *Travis* on build completion. +Requires the user first authenticate with galaxy.ansible.com using the *login* +subcommand. + +USAGE +~~~~~ + +$ ansible-galaxy setup [options] source github_user github_repo secret + +* Use *travis* as the source value. In the future additional source values may + be added. + +* Provide your *Travis* user token as the secret. The token is not stored by + galaxy.ansible.com. A hash is created using github_user, github_repo + and your token. 
The hash value is what actually gets stored. + +OPTIONS +~~~~~~~ + +*-c*, *--ingore-certs*:: + +Ignore TLS certificate errors. + +*-s*, *--server*:: + +Override the default server https://galaxy.ansible.com. + +--list:: + +Show your configured integrations. Provids the ID of each integration +which can be used with the remove option. + +--remove:: + +Remove a specific integration. Provide the ID of the integration to +be removed. + AUTHOR ------ diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py index 13df7c41220..1cd936d028e 100644 --- a/lib/ansible/cli/galaxy.py +++ b/lib/ansible/cli/galaxy.py @@ -100,7 +100,7 @@ class GalaxyCLI(CLI): self.parser.set_usage("usage: %prog list [role_name]") elif self.action == "login": self.parser.set_usage("usage: %prog login [options]") - self.parser.add_option('-g','--github-token', dest='token', default=None, + self.parser.add_option('--github-token', dest='token', default=None, help='Identify with github token rather than username and password.') elif self.action == "search": self.parser.add_option('--platforms', dest='platforms', @@ -118,7 +118,7 @@ class GalaxyCLI(CLI): help='List all of your integrations.') # options that apply to more than one action - if not self.action in ("import","init","login","setup"): + if not self.action in ("delete","import","init","login","setup"): self.parser.add_option('-p', '--roles-path', dest='roles_path', default=C.DEFAULT_ROLES_PATH, help='The path to the directory containing your roles. ' 'The default is the roles_path configured in your ' From f1c72ff8f51b749165d5bc4089ca8c8fd5b22789 Mon Sep 17 00:00:00 2001 From: chouseknecht Date: Wed, 9 Dec 2015 22:04:00 -0500 Subject: [PATCH 0117/1113] Make sure it is clear that new commands require using the Galaxy 2.0 Beta site. --- docsite/rst/galaxy.rst | 58 +++++++++++++++++++++++++++++------------- 1 file changed, 40 insertions(+), 18 deletions(-) diff --git a/docsite/rst/galaxy.rst b/docsite/rst/galaxy.rst index c9dea273367..3a12044ca9e 100644 --- a/docsite/rst/galaxy.rst +++ b/docsite/rst/galaxy.rst @@ -1,7 +1,7 @@ Ansible Galaxy ++++++++++++++ -"Ansible Galaxy" can either refer to a website for sharing and downloading Ansible roles, or a command line tool that helps work with roles. +"Ansible Galaxy" can either refer to a website for sharing and downloading Ansible roles, or a command line tool for managing and creating roles. .. contents:: Topics @@ -10,24 +10,36 @@ The Website The website `Ansible Galaxy `_, is a free site for finding, downloading, and sharing community developed Ansible roles. Downloading roles from Galaxy is a great way to jumpstart your automation projects. -You can sign up with social auth and use the download client 'ansible-galaxy' which is included in Ansible 1.4.2 and later. +Access the Galaxy web site using GitHub OAuth, and to install roles use the 'ansible-galaxy' command line tool included in Ansible 1.4.2 and later. Read the "About" page on the Galaxy site for more information. The ansible-galaxy command line tool ```````````````````````````````````` -The command line ansible-galaxy has many different subcommands. +The ansible-galaxy command has many different sub-commands for managing roles both locally and at `galaxy.ansible.com `_. + +.. note:: + + The search, login, import, delete, and setup commands in the Ansible 2.0 version of ansible-galaxy require access to the + 2.0 Beta release of the Galaxy web site available at `https://galaxy-qa.ansible.com `_. + + Use the ``--server`` option to access the beta site. 
For example:: + + $ ansible-galaxy search --server https://galaxy-qa.ansible.com mysql --author geerlingguy + + Additionally, you can define a server in ansible.cfg:: + + [galaxy] + server=https://galaxy-qa.ansible.com Installing Roles ---------------- -The most obvious is downloading roles from the Ansible Galaxy website:: +The most obvious use of the ansible-galaxy command is downloading roles from `the Ansible Galaxy website `_:: $ ansible-galaxy install username.rolename -.. _galaxy_cli_roles_path: - roles_path =============== @@ -169,7 +181,9 @@ The search command will return a list of the first 1000 results matching your se .. note:: - The format of results pictured here is new in Ansible 2.0. + The search command in Ansible 2.0 requires using the Galaxy 2.0 Beta site. Use the ``--server`` option to access + `https://galaxy-qa.ansible.com `_. You can also add a *server* definition in the [galaxy] + section of your ansible.cfg file. Get More Information About a Role --------------------------------- @@ -213,10 +227,6 @@ This returns everything found in Galaxy for the role: version: watchers_count: 1 -.. note:: - - The format of results pictured here is new in Ansible 2.0. - List Installed Roles -------------------- @@ -262,7 +272,13 @@ To use the import, delete and setup commands authentication with Galaxy is requi As depicted above, the login command prompts for a GitHub username and password. It does NOT send your password to Galaxy. It actually authenticates with GitHub and creates a personal access token. It then sends the personal access token to Galaxy, which in turn verifies that you are you and returns a Galaxy access token. After authentication completes the GitHub personal access token is destroyed. -If you do not wish to use your GitHub password, or if you have two-factor authentication enabled with GitHub, use the --github-token option to pass a personal access token that you create. Log into GitHub, go to Settings and click on Personal Access Token to create a token. +If you do not wish to use your GitHub password, or if you have two-factor authentication enabled with GitHub, use the --github-token option to pass a personal access token that you create. Log into GitHub, go to Settings and click on Personal Access Token to create a token. + +.. note:: + + The login command in Ansible 2.0 requires using the Galaxy 2.0 Beta site. Use the ``--server`` option to access + `https://galaxy-qa.ansible.com `_. You can also add a *server* definition in the [galaxy] + section of your ansible.cfg file. Import a Role ------------- @@ -298,7 +314,9 @@ If the --no-wait option is present, the command will not wait for results. Resul .. note:: - The import command is only available in Ansible 2.0. + The import command in Ansible 2.0 requires using the Galaxy 2.0 Beta site. Use the ``--server`` option to access + `https://galaxy-qa.ansible.com `_. You can also add a *server* definition in the [galaxy] + section of your ansible.cfg file. Delete a Role ------------- @@ -307,13 +325,15 @@ Remove a role from the Galaxy web site using the delete command. You can delete :: - ansible-galaxy delete github_user github_repo + $ ansible-galaxy delete github_user github_repo This only removes the role from Galaxy. It does not impact the actual GitHub repo. .. note:: - The delete command is only available in Ansible 2.0. + The delete command in Ansible 2.0 requires using the Galaxy 2.0 Beta site. Use the ``--server`` option to access + `https://galaxy-qa.ansible.com `_. 
You can also add a *server* definition in the [galaxy] + section of your ansible.cfg file. Setup Travis Integerations -------------------------- @@ -324,7 +344,7 @@ Using the setup command you can enable notifications from `travis `_. The calculated hash is stored in Galaxy and used to verify notifications received from Travis. @@ -339,7 +359,9 @@ When you create your .travis.yml file add the following to cause Travis to notif .. note:: - The setup command is only available in Ansible 2.0. + The setup command in Ansible 2.0 requires using the Galaxy 2.0 Beta site. Use the ``--server`` option to access + `https://galaxy-qa.ansible.com `_. You can also add a *server* definition in the [galaxy] + section of your ansible.cfg file. List Travis Integrtions @@ -361,7 +383,7 @@ Use the --list option to display your Travis integrations: Remove Travis Integrations ========================== -Use the --remove option to disable a Travis integration: +Use the --remove option to disable and remove a Travis integration: :: From 342dee0023e2c6fd6d361a70fec621c09b833915 Mon Sep 17 00:00:00 2001 From: chouseknecht Date: Wed, 9 Dec 2015 22:56:54 -0500 Subject: [PATCH 0118/1113] Define and handle ignore_certs correctly. Preserve search term order. Tweak to Galaxy docsite. --- docsite/rst/galaxy.rst | 2 +- lib/ansible/cli/galaxy.py | 8 ++++---- lib/ansible/galaxy/api.py | 18 ++++++++---------- 3 files changed, 13 insertions(+), 15 deletions(-) diff --git a/docsite/rst/galaxy.rst b/docsite/rst/galaxy.rst index 3a12044ca9e..200fdfd5750 100644 --- a/docsite/rst/galaxy.rst +++ b/docsite/rst/galaxy.rst @@ -41,7 +41,7 @@ The most obvious use of the ansible-galaxy command is downloading roles from `th $ ansible-galaxy install username.rolename roles_path -=============== +========== You can specify a particular directory where you want the downloaded roles to be placed:: diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py index 1cd936d028e..a4a7b915f36 100644 --- a/lib/ansible/cli/galaxy.py +++ b/lib/ansible/cli/galaxy.py @@ -127,7 +127,7 @@ class GalaxyCLI(CLI): if self.action in ("import","info","init","install","login","search","setup","delete"): self.parser.add_option('-s', '--server', dest='api_server', default=C.GALAXY_SERVER, help='The API server destination') - self.parser.add_option('-c', '--ignore-certs', action='store_false', dest='validate_certs', default=True, + self.parser.add_option('-c', '--ignore-certs', action='store_true', dest='ignore_certs', default=False, help='Ignore SSL certificate validation errors.') if self.action in ("init","install"): @@ -505,7 +505,7 @@ class GalaxyCLI(CLI): terms = [] for i in range(len(self.args)): terms.append(self.args.pop()) - search = '+'.join(terms) + search = '+'.join(terms[::-1]) if not search and not self.options.platforms and not self.options.tags and not self.options.author: raise AnsibleError("Invalid query. At least one search term, platform, galaxy tag or author must be provided.") @@ -520,9 +520,9 @@ class GalaxyCLI(CLI): data = '' if response['count'] > page_size: - data += ("Found %d roles matching your search. Showing first %s.\n" % (response['count'], page_size)) + data += ("\nFound %d roles matching your search. 
Showing first %s.\n" % (response['count'], page_size)) else: - data += ("Found %d roles matching your search:\n" % response['count']) + data += ("\nFound %d roles matching your search:\n" % response['count']) max_len = [] for role in response['results']: diff --git a/lib/ansible/galaxy/api.py b/lib/ansible/galaxy/api.py index c1bf2c4ed50..eec9ee932e0 100644 --- a/lib/ansible/galaxy/api.py +++ b/lib/ansible/galaxy/api.py @@ -48,16 +48,15 @@ class GalaxyAPI(object): SUPPORTED_VERSIONS = ['v1'] def __init__(self, galaxy): - self.galaxy = galaxy self.token = GalaxyToken() self._api_server = C.GALAXY_SERVER - self._validate_certs = C.GALAXY_IGNORE_CERTS + self._validate_certs = not C.GALAXY_IGNORE_CERTS # set validate_certs - if galaxy.options.validate_certs == False: + if galaxy.options.ignore_certs: self._validate_certs = False - display.vvv('Check for valid certs: %s' % self._validate_certs) + display.vvv('Validate TLS certificates: %s' % self._validate_certs) # set the API server if galaxy.options.api_server != C.GALAXY_SERVER: @@ -65,14 +64,13 @@ class GalaxyAPI(object): display.vvv("Connecting to galaxy_server: %s" % self._api_server) server_version = self.get_server_api_version() - - if server_version in self.SUPPORTED_VERSIONS: - self.baseurl = '%s/api/%s' % (self._api_server, server_version) - self.version = server_version # for future use - display.vvv("Base API: %s" % self.baseurl) - else: + if not server_version in self.SUPPORTED_VERSIONS: raise AnsibleError("Unsupported Galaxy server API version: %s" % server_version) + self.baseurl = '%s/api/%s' % (self._api_server, server_version) + self.version = server_version # for future use + display.vvv("Base API: %s" % self.baseurl) + def __auth_header(self): token = self.token.get() if token is None: From 847f454bccb6ec3942ff5d652db7dd1db4d77159 Mon Sep 17 00:00:00 2001 From: chouseknecht Date: Wed, 9 Dec 2015 23:25:23 -0500 Subject: [PATCH 0119/1113] Add a section to intro_configuration for Galaxy. --- docsite/rst/intro_configuration.rst | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/docsite/rst/intro_configuration.rst b/docsite/rst/intro_configuration.rst index dda07fc4506..0ad54938d08 100644 --- a/docsite/rst/intro_configuration.rst +++ b/docsite/rst/intro_configuration.rst @@ -897,3 +897,19 @@ The normal behaviour is for operations to copy the existing context or use the u The default list is: nfs,vboxsf,fuse,ramfs:: special_context_filesystems = nfs,vboxsf,fuse,ramfs,myspecialfs + +Galaxy Settings +--------------- + +The following options can be set in the [galaxy] section of ansible.cfg: + +server +====== + +Override the default Galaxy server value of https://galaxy.ansible.com. + +ignore_certs +============ + +If set to *yes*, ansible-galaxy will not validate TLS certificates. Handy for testing against a server with a self-signed certificate +. \ No newline at end of file From 06dde0d332d88e958ac5489bea88f0f5bc536e1b Mon Sep 17 00:00:00 2001 From: chouseknecht Date: Thu, 10 Dec 2015 10:57:48 -0500 Subject: [PATCH 0120/1113] Fixed documentation typos and bits that needed clarification. Fixed missing spaces in VALID_ACTIONS. 
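For quick reference, the two options documented in the new "Galaxy Settings" section above can sit together in ansible.cfg; a minimal snippet (the values shown are only examples) could look like::

    [galaxy]
    server=https://galaxy-qa.ansible.com
    ignore_certs=yes

Either setting can also be overridden for a single run with the ansible-galaxy ``--server`` and ``--ignore-certs`` options shown in the CLI changes above.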
--- docs/man/man1/ansible-galaxy.1.asciidoc.in | 19 ++++++++++--------- docsite/rst/galaxy.rst | 4 ++-- docsite/rst/intro_configuration.rst | 4 ++-- lib/ansible/cli/galaxy.py | 2 +- 4 files changed, 15 insertions(+), 14 deletions(-) diff --git a/docs/man/man1/ansible-galaxy.1.asciidoc.in b/docs/man/man1/ansible-galaxy.1.asciidoc.in index 44f0b46b085..9ffe65e45a7 100644 --- a/docs/man/man1/ansible-galaxy.1.asciidoc.in +++ b/docs/man/man1/ansible-galaxy.1.asciidoc.in @@ -147,8 +147,9 @@ configured in your *ansible.cfg* file (/etc/ansible/roles if not configured) SEARCH ------ -The *search* sub-command returns a filtered list of roles found at -galaxy.ansible.com. +The *search* sub-command returns a filtered list of roles found on the remote +server. + USAGE ~~~~~ @@ -170,7 +171,7 @@ Provide a comma separated list of Platforms on which to filter. Specify the username of a Galaxy contributor on which to filter. -*-c*, *--ingore-certs*:: +*-c*, *--ignore-certs*:: Ignore TLS certificate errors. @@ -199,7 +200,7 @@ OPTIONS The path to the directory containing your roles. The default is the *roles_path* configured in your *ansible.cfg* file (/etc/ansible/roles if not configured) -*-c*, *--ingore-certs*:: +*-c*, *--ignore-certs*:: Ignore TLS certificate errors. @@ -213,7 +214,7 @@ LOGIN The *login* sub-command is used to authenticate with galaxy.ansible.com. Authentication is required to use the import, delete and setup commands. -It will authenticate the user,retrieve a token from Galaxy, and store it +It will authenticate the user, retrieve a token from Galaxy, and store it in the user's home directory. USAGE @@ -236,7 +237,7 @@ click on Personal Access Token to create a token. OPTIONS ~~~~~~~ -*-c*, *--ingore-certs*:: +*-c*, *--ignore-certs*:: Ignore TLS certificate errors. @@ -262,7 +263,7 @@ $ ansible-galaxy import [options] github_user github_repo OPTIONS ~~~~~~~ -*-c*, *--ingore-certs*:: +*-c*, *--ignore-certs*:: Ignore TLS certificate errors. @@ -291,7 +292,7 @@ $ ansible-galaxy delete [options] github_user github_repo OPTIONS ~~~~~~~ -*-c*, *--ingore-certs*:: +*-c*, *--ignore-certs*:: Ignore TLS certificate errors. @@ -323,7 +324,7 @@ $ ansible-galaxy setup [options] source github_user github_repo secret OPTIONS ~~~~~~~ -*-c*, *--ingore-certs*:: +*-c*, *--ignore-certs*:: Ignore TLS certificate errors. diff --git a/docsite/rst/galaxy.rst b/docsite/rst/galaxy.rst index 200fdfd5750..f8cde57e62c 100644 --- a/docsite/rst/galaxy.rst +++ b/docsite/rst/galaxy.rst @@ -364,8 +364,8 @@ When you create your .travis.yml file add the following to cause Travis to notif section of your ansible.cfg file. -List Travis Integrtions -======================= +List Travis Integrations +======================== Use the --list option to display your Travis integrations: diff --git a/docsite/rst/intro_configuration.rst b/docsite/rst/intro_configuration.rst index 0ad54938d08..ccfb456ed93 100644 --- a/docsite/rst/intro_configuration.rst +++ b/docsite/rst/intro_configuration.rst @@ -906,10 +906,10 @@ The following options can be set in the [galaxy] section of ansible.cfg: server ====== -Override the default Galaxy server value of https://galaxy.ansible.com. +Override the default Galaxy server value of https://galaxy.ansible.com. Useful if you have a hosted version of the Galaxy web app or want to point to the testing site https://galaxy-qa.ansible.com. It does not work against private, hosted repos, which Galaxy can use for fetching and installing roles. 
ignore_certs ============ If set to *yes*, ansible-galaxy will not validate TLS certificates. Handy for testing against a server with a self-signed certificate -. \ No newline at end of file +. diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py index a4a7b915f36..34afa03c9f7 100644 --- a/lib/ansible/cli/galaxy.py +++ b/lib/ansible/cli/galaxy.py @@ -49,7 +49,7 @@ except ImportError: class GalaxyCLI(CLI): SKIP_INFO_KEYS = ("name", "description", "readme_html", "related", "summary_fields", "average_aw_composite", "average_aw_score", "url" ) - VALID_ACTIONS = ("delete","import","info","init","install","list","login","remove","search","setup") + VALID_ACTIONS = ("delete", "import", "info", "init", "install", "list", "login", "remove", "search", "setup") def __init__(self, args): self.api = None From 95785f149d21badaf7cba35b4ffa7ed5805235d4 Mon Sep 17 00:00:00 2001 From: chouseknecht Date: Thu, 10 Dec 2015 21:44:03 -0500 Subject: [PATCH 0121/1113] Fix docs. The search command works with both galaxy.ansible.com and galaxy-qa.ansible.com. --- docsite/rst/galaxy.rst | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docsite/rst/galaxy.rst b/docsite/rst/galaxy.rst index f8cde57e62c..6d64a542b4a 100644 --- a/docsite/rst/galaxy.rst +++ b/docsite/rst/galaxy.rst @@ -181,9 +181,7 @@ The search command will return a list of the first 1000 results matching your se .. note:: - The search command in Ansible 2.0 requires using the Galaxy 2.0 Beta site. Use the ``--server`` option to access - `https://galaxy-qa.ansible.com `_. You can also add a *server* definition in the [galaxy] - section of your ansible.cfg file. + The format of results pictured here is new in Ansible 2.0. Get More Information About a Role --------------------------------- From 2bc3683d41b307611a03447e9d4b194ba6ef5c1c Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sun, 13 Dec 2015 05:54:57 -0800 Subject: [PATCH 0122/1113] Restore comment about for-else since it is an uncommon idiom --- lib/ansible/plugins/action/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py index 154404e474c..254bab476bb 100644 --- a/lib/ansible/plugins/action/__init__.py +++ b/lib/ansible/plugins/action/__init__.py @@ -119,7 +119,7 @@ class ActionBase(with_metaclass(ABCMeta, object)): module_path = self._shared_loader_obj.module_loader.find_plugin(module_name, mod_type) if module_path: break - else: + else: # This is a for-else: http://bit.ly/1ElPkyg # Use Windows version of ping module to check module paths when # using a connection that supports .ps1 suffixes. We check specifically # for win_ping here, otherwise the code would look for ping.ps1 From 0c954bd14298a81be4c9026563326a87f9c42f58 Mon Sep 17 00:00:00 2001 From: Robin Roth Date: Sun, 13 Dec 2015 18:00:54 +0100 Subject: [PATCH 0123/1113] add --full flag to ansible-pull man page add --full flag that was added in #13502 --- docs/man/man1/ansible-pull.1.asciidoc.in | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/man/man1/ansible-pull.1.asciidoc.in b/docs/man/man1/ansible-pull.1.asciidoc.in index 333b8e34e0f..0afba2aeaac 100644 --- a/docs/man/man1/ansible-pull.1.asciidoc.in +++ b/docs/man/man1/ansible-pull.1.asciidoc.in @@ -95,6 +95,10 @@ Force running of playbook even if unable to update playbook repository. This can be useful, for example, to enforce run-time state when a network connection may not always be up or possible. 
+*--full*:: + +Do a full clone of the repository. By default ansible-pull will do a shallow clone based on the last revision. + *-h*, *--help*:: Show the help message and exit. From 89603a0509117610e2cbebc6c48475a3b8af98b2 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Sun, 13 Dec 2015 12:18:28 -0500 Subject: [PATCH 0124/1113] added that ansible-pull is now shallow to changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2bf11e6c5bc..c6319634fb7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -350,6 +350,7 @@ newline being stripped you can change your playbook like this: * We do not ignore the explicitly set login user for ssh when it matches the 'current user' anymore, this allows overriding .ssh/config when it is set explicitly. Leaving it unset will still use the same user and respect .ssh/config. This also means ansible_ssh_user can now return a None value. * environment variables passed to remote shells now default to 'controller' settings, with fallback to en_us.UTF8 which was the previous default. +* ansible-pull now defaults to doing shallow checkouts with git, use `--full` to return to previous behaviour. * Handling of undefined variables has changed. In most places they will now raise an error instead of silently injecting an empty string. Use the default filter if you want to approximate the old behaviour: ``` From f8ff63f8c8ab001ea8f096968b550f23262c193c Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Mon, 14 Dec 2015 03:06:52 -0500 Subject: [PATCH 0125/1113] A few tweaks to improve new forking code --- lib/ansible/plugins/strategy/__init__.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/lib/ansible/plugins/strategy/__init__.py b/lib/ansible/plugins/strategy/__init__.py index ea30b800b02..4047bde73a2 100644 --- a/lib/ansible/plugins/strategy/__init__.py +++ b/lib/ansible/plugins/strategy/__init__.py @@ -149,17 +149,20 @@ class StrategyBase: # way to share them with the forked processes shared_loader_obj = SharedPluginLoaderObj() + queued = False while True: (worker_prc, main_q, rslt_q) = self._workers[self._cur_worker] if worker_prc is None or not worker_prc.is_alive(): worker_prc = WorkerProcess(rslt_q, task_vars, host, task, play_context, self._loader, self._variable_manager, shared_loader_obj) self._workers[self._cur_worker][0] = worker_prc worker_prc.start() - break + queued = True self._cur_worker += 1 if self._cur_worker >= len(self._workers): self._cur_worker = 0 time.sleep(0.0001) + if queued: + break del task_vars self._pending_results += 1 @@ -196,7 +199,7 @@ class StrategyBase: else: iterator.mark_host_failed(host) (state, tmp_task) = iterator.get_next_task_for_host(host, peek=True) - if state.run_state != PlayIterator.ITERATING_RESCUE: + if not state or state.run_state != PlayIterator.ITERATING_RESCUE: self._tqm._failed_hosts[host.name] = True self._tqm._stats.increment('failures', host.name) else: From 279c5a359631d296e1a91c1520417e68750138bb Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Mon, 14 Dec 2015 03:07:20 -0500 Subject: [PATCH 0126/1113] Cleanup strategy tests broken by new forking strategy --- .../plugins/strategies/test_strategy_base.py | 127 +++++++++++------- 1 file changed, 76 insertions(+), 51 deletions(-) diff --git a/test/units/plugins/strategies/test_strategy_base.py b/test/units/plugins/strategies/test_strategy_base.py index bf01cf6fcc2..7cc81a0324e 100644 --- a/test/units/plugins/strategies/test_strategy_base.py +++ 
b/test/units/plugins/strategies/test_strategy_base.py @@ -24,8 +24,11 @@ from ansible.compat.tests.mock import patch, MagicMock from ansible.errors import AnsibleError, AnsibleParserError from ansible.plugins.strategy import StrategyBase +from ansible.executor.process.worker import WorkerProcess from ansible.executor.task_queue_manager import TaskQueueManager from ansible.executor.task_result import TaskResult +from ansible.playbook.handler import Handler +from ansible.inventory.host import Host from six.moves import queue as Queue from units.mock.loader import DictDataLoader @@ -98,37 +101,44 @@ class TestStrategyBase(unittest.TestCase): mock_tqm._unreachable_hosts = ["host02"] self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), mock_hosts[2:]) - def test_strategy_base_queue_task(self): - fake_loader = DictDataLoader() - - workers = [] - for i in range(0, 3): - worker_main_q = MagicMock() - worker_main_q.put.return_value = None - worker_result_q = MagicMock() - workers.append([i, worker_main_q, worker_result_q]) + @patch.object(WorkerProcess, 'run') + def test_strategy_base_queue_task(self, mock_worker): + def fake_run(self): + return - mock_tqm = MagicMock() - mock_tqm._final_q = MagicMock() - mock_tqm.get_workers.return_value = workers - mock_tqm.get_loader.return_value = fake_loader + mock_worker.run.side_effect = fake_run - strategy_base = StrategyBase(tqm=mock_tqm) - strategy_base._cur_worker = 0 - strategy_base._pending_results = 0 - strategy_base._queue_task(host=MagicMock(), task=MagicMock(), task_vars=dict(), play_context=MagicMock()) - self.assertEqual(strategy_base._cur_worker, 1) - self.assertEqual(strategy_base._pending_results, 1) - strategy_base._queue_task(host=MagicMock(), task=MagicMock(), task_vars=dict(), play_context=MagicMock()) - self.assertEqual(strategy_base._cur_worker, 2) - self.assertEqual(strategy_base._pending_results, 2) - strategy_base._queue_task(host=MagicMock(), task=MagicMock(), task_vars=dict(), play_context=MagicMock()) - self.assertEqual(strategy_base._cur_worker, 0) - self.assertEqual(strategy_base._pending_results, 3) - workers[0][1].put.side_effect = EOFError - strategy_base._queue_task(host=MagicMock(), task=MagicMock(), task_vars=dict(), play_context=MagicMock()) - self.assertEqual(strategy_base._cur_worker, 1) - self.assertEqual(strategy_base._pending_results, 3) + fake_loader = DictDataLoader() + mock_var_manager = MagicMock() + mock_host = MagicMock() + mock_inventory = MagicMock() + mock_options = MagicMock() + mock_options.module_path = None + + tqm = TaskQueueManager( + inventory=mock_inventory, + variable_manager=mock_var_manager, + loader=fake_loader, + options=mock_options, + passwords=None, + ) + tqm._initialize_processes(3) + tqm.hostvars = dict() + + try: + strategy_base = StrategyBase(tqm=tqm) + strategy_base._queue_task(host=mock_host, task=MagicMock(), task_vars=dict(), play_context=MagicMock()) + self.assertEqual(strategy_base._cur_worker, 1) + self.assertEqual(strategy_base._pending_results, 1) + strategy_base._queue_task(host=mock_host, task=MagicMock(), task_vars=dict(), play_context=MagicMock()) + self.assertEqual(strategy_base._cur_worker, 2) + self.assertEqual(strategy_base._pending_results, 2) + strategy_base._queue_task(host=mock_host, task=MagicMock(), task_vars=dict(), play_context=MagicMock()) + self.assertEqual(strategy_base._cur_worker, 0) + self.assertEqual(strategy_base._pending_results, 3) + finally: + tqm.cleanup() + def test_strategy_base_process_pending_results(self): mock_tqm = MagicMock() @@ 
-156,6 +166,7 @@ class TestStrategyBase(unittest.TestCase): mock_iterator = MagicMock() mock_iterator.mark_host_failed.return_value = None + mock_iterator.get_next_task_for_host.return_value = (None, None) mock_host = MagicMock() mock_host.name = 'test01' @@ -315,22 +326,15 @@ class TestStrategyBase(unittest.TestCase): res = strategy_base._load_included_file(included_file=mock_inc_file, iterator=mock_iterator) self.assertEqual(res, []) - def test_strategy_base_run_handlers(self): - workers = [] - for i in range(0, 3): - worker_main_q = MagicMock() - worker_main_q.put.return_value = None - worker_result_q = MagicMock() - workers.append([i, worker_main_q, worker_result_q]) - - mock_tqm = MagicMock() - mock_tqm._final_q = MagicMock() - mock_tqm.get_workers.return_value = workers - mock_tqm.send_callback.return_value = None - + @patch.object(WorkerProcess, 'run') + def test_strategy_base_run_handlers(self, mock_worker): + def fake_run(*args): + return + mock_worker.side_effect = fake_run mock_play_context = MagicMock() - mock_handler_task = MagicMock() + mock_handler_task = MagicMock(Handler) + mock_handler_task.action = 'foo' mock_handler_task.get_name.return_value = "test handler" mock_handler_task.has_triggered.return_value = False @@ -341,11 +345,9 @@ class TestStrategyBase(unittest.TestCase): mock_play = MagicMock() mock_play.handlers = [mock_handler] - mock_host = MagicMock() + mock_host = MagicMock(Host) mock_host.name = "test01" - mock_iterator = MagicMock() - mock_inventory = MagicMock() mock_inventory.get_hosts.return_value = [mock_host] @@ -355,8 +357,31 @@ class TestStrategyBase(unittest.TestCase): mock_iterator = MagicMock mock_iterator._play = mock_play - strategy_base = StrategyBase(tqm=mock_tqm) - strategy_base._inventory = mock_inventory - strategy_base._notified_handlers = {"test handler": [mock_host]} - - result = strategy_base.run_handlers(iterator=mock_iterator, play_context=mock_play_context) + fake_loader = DictDataLoader() + mock_options = MagicMock() + mock_options.module_path = None + + tqm = TaskQueueManager( + inventory=mock_inventory, + variable_manager=mock_var_mgr, + loader=fake_loader, + options=mock_options, + passwords=None, + ) + tqm._initialize_processes(3) + tqm.hostvars = dict() + + try: + strategy_base = StrategyBase(tqm=tqm) + + strategy_base._inventory = mock_inventory + strategy_base._notified_handlers = {"test handler": [mock_host]} + + mock_return_task = MagicMock(Handler) + mock_return_host = MagicMock(Host) + task_result = TaskResult(mock_return_host, mock_return_task, dict(changed=False)) + tqm._final_q.put(('host_task_ok', task_result)) + + result = strategy_base.run_handlers(iterator=mock_iterator, play_context=mock_play_context) + finally: + tqm.cleanup() From f5f9b2fd354fe013e68f589279cc349a42a461fb Mon Sep 17 00:00:00 2001 From: Hans-Joachim Kliemeck Date: Mon, 14 Dec 2015 14:36:35 +0100 Subject: [PATCH 0127/1113] use default settings from ansible.cfg --- lib/ansible/cli/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py index 012872be7c5..48e01346726 100644 --- a/lib/ansible/cli/__init__.py +++ b/lib/ansible/cli/__init__.py @@ -246,7 +246,7 @@ class CLI(object): help="specify number of parallel processes to use (default=%s)" % C.DEFAULT_FORKS) if vault_opts: - parser.add_option('--ask-vault-pass', default=False, dest='ask_vault_pass', action='store_true', + parser.add_option('--ask-vault-pass', default=C.DEFAULT_ASK_VAULT_PASS, dest='ask_vault_pass', 
action='store_true', help='ask for vault password') parser.add_option('--vault-password-file', default=C.DEFAULT_VAULT_PASSWORD_FILE, dest='vault_password_file', help="vault password file", action="callback", callback=CLI.expand_tilde, type=str) From 1f8e484b70f90d34d127eda9cf10a619bb0e72e8 Mon Sep 17 00:00:00 2001 From: Monty Taylor Date: Thu, 3 Dec 2015 07:07:13 -0800 Subject: [PATCH 0128/1113] Fix the refresh flag in openstack inventory Refresh will update the dogpile cache from shade, but doesn't cause the ansible side json cache to be invalidated. It's a simple oversight. --- contrib/inventory/openstack.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/contrib/inventory/openstack.py b/contrib/inventory/openstack.py index 46b43e92212..231488b06df 100755 --- a/contrib/inventory/openstack.py +++ b/contrib/inventory/openstack.py @@ -94,9 +94,9 @@ def get_groups_from_server(server_vars): return groups -def get_host_groups(inventory): +def get_host_groups(inventory, refresh=False): (cache_file, cache_expiration_time) = get_cache_settings() - if is_cache_stale(cache_file, cache_expiration_time): + if is_cache_stale(cache_file, cache_expiration_time, refresh=refresh): groups = to_json(get_host_groups_from_cloud(inventory)) open(cache_file, 'w').write(groups) else: @@ -121,8 +121,10 @@ def get_host_groups_from_cloud(inventory): return groups -def is_cache_stale(cache_file, cache_expiration_time): +def is_cache_stale(cache_file, cache_expiration_time, refresh=False): ''' Determines if cache file has expired, or if it is still valid ''' + if refresh: + return True if os.path.isfile(cache_file): mod_time = os.path.getmtime(cache_file) current_time = time.time() @@ -176,7 +178,7 @@ def main(): ) if args.list: - output = get_host_groups(inventory) + output = get_host_groups(inventory, refresh=args.refresh) elif args.host: output = to_json(inventory.get_host(args.host)) print(output) From 49dc9eea169efb329d7d184df53ce3dea4dface1 Mon Sep 17 00:00:00 2001 From: Jonathan Mainguy Date: Wed, 9 Dec 2015 15:11:21 -0500 Subject: [PATCH 0129/1113] add tests for encrypted hash mysql_user --- .../tasks/user_password_update_test.yml | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/test/integration/roles/test_mysql_user/tasks/user_password_update_test.yml b/test/integration/roles/test_mysql_user/tasks/user_password_update_test.yml index 50307cef956..9a899b206ca 100644 --- a/test/integration/roles/test_mysql_user/tasks/user_password_update_test.yml +++ b/test/integration/roles/test_mysql_user/tasks/user_password_update_test.yml @@ -79,8 +79,23 @@ - include: remove_user.yml user_name={{user_name_2}} user_password={{ user_password_1 }} +- name: Create user with password1234 using hash. (expect changed=true) + mysql_user: name=jmainguy password='*D65798AAC0E5C6DF3F320F8A30E026E7EBD73A95' encrypted=yes + register: encrypt_result +- name: Check that the module made a change + assert: + that: + - "encrypt_result.changed == True" +- name: See if the password needs to be updated. 
(expect changed=false) + mysql_user: name=jmainguy password='password1234' + register: plain_result +- name: Check that the module did not change the password + assert: + that: + - "plain_result.changed == False" - +- name: Remove user (cleanup) + mysql_user: name=jmainguy state=absent From 9f61144401a16c9d610193522c71e8852addf63e Mon Sep 17 00:00:00 2001 From: Monty Taylor Date: Thu, 3 Dec 2015 07:04:24 -0800 Subject: [PATCH 0130/1113] Optionally only use UUIDs for openstack hosts on duplicates The OpenStack inventory lists hostnames as the UUIDs because hostsnames are not guarnateed to be unique on OpenStack. However, for the common case, this is just confusing. The new behavior is a visible change, so make it an opt-in via config. Only turn the hostnames to UUIDs if there are duplicate hostnames. --- contrib/inventory/openstack.py | 57 +++++++++++++++++++++++++++------ contrib/inventory/openstack.yml | 3 ++ 2 files changed, 50 insertions(+), 10 deletions(-) diff --git a/contrib/inventory/openstack.py b/contrib/inventory/openstack.py index 231488b06df..b82a042c29e 100755 --- a/contrib/inventory/openstack.py +++ b/contrib/inventory/openstack.py @@ -32,6 +32,13 @@ # all of them and present them as one contiguous inventory. # # See the adjacent openstack.yml file for an example config file +# There are two ansible inventory specific options that can be set in +# the inventory section. +# expand_hostvars controls whether or not the inventory will make extra API +# calls to fill out additional information about each server +# use_hostnames changes the behavior from registering every host with its UUID +# and making a group of its hostname to only doing this if the +# hostname in question has more than one server import argparse import collections @@ -51,7 +58,7 @@ import shade.inventory CONFIG_FILES = ['/etc/ansible/openstack.yaml'] -def get_groups_from_server(server_vars): +def get_groups_from_server(server_vars, namegroup=True): groups = [] region = server_vars['region'] @@ -76,7 +83,8 @@ def get_groups_from_server(server_vars): groups.append(extra_group) groups.append('instance-%s' % server_vars['id']) - groups.append(server_vars['name']) + if namegroup: + groups.append(server_vars['name']) for key in ('flavor', 'image'): if 'name' in server_vars[key]: @@ -106,17 +114,36 @@ def get_host_groups(inventory, refresh=False): def get_host_groups_from_cloud(inventory): groups = collections.defaultdict(list) + firstpass = collections.defaultdict(list) hostvars = {} - for server in inventory.list_hosts(): + list_args = {} + if hasattr(inventory, 'extra_config'): + use_hostnames = inventory.extra_config['use_hostnames'] + list_args['expand'] = inventory.extra_config['expand_hostvars'] + else: + use_hostnames = False + + for server in inventory.list_hosts(**list_args): if 'interface_ip' not in server: continue - for group in get_groups_from_server(server): - groups[group].append(server['id']) - hostvars[server['id']] = dict( - ansible_ssh_host=server['interface_ip'], - openstack=server, - ) + firstpass[server['name']].append(server) + for name, servers in firstpass.items(): + if len(servers) == 1 and use_hostnames: + server = servers[0] + hostvars[name] = dict( + ansible_ssh_host=server['interface_ip'], + openstack=server) + for group in get_groups_from_server(server, namegroup=False): + groups[group].append(server['name']) + else: + for server in servers: + server_id = server['id'] + hostvars[server_id] = dict( + ansible_ssh_host=server['interface_ip'], + openstack=server) + for group in 
get_groups_from_server(server, namegroup=True): + groups[group].append(server_id) groups['_meta'] = {'hostvars': hostvars} return groups @@ -171,11 +198,21 @@ def main(): try: config_files = os_client_config.config.CONFIG_FILES + CONFIG_FILES shade.simple_logging(debug=args.debug) - inventory = shade.inventory.OpenStackInventory( + inventory_args = dict( refresh=args.refresh, config_files=config_files, private=args.private, ) + if hasattr(shade.inventory.OpenStackInventory, 'extra_config'): + inventory_args.update(dict( + config_key='ansible', + config_defaults={ + 'use_hostnames': False, + 'expand_hostvars': True, + } + )) + + inventory = shade.inventory.OpenStackInventory(**inventory_args) if args.list: output = get_host_groups(inventory, refresh=args.refresh) diff --git a/contrib/inventory/openstack.yml b/contrib/inventory/openstack.yml index a99bb020580..1520e2937ec 100644 --- a/contrib/inventory/openstack.yml +++ b/contrib/inventory/openstack.yml @@ -26,3 +26,6 @@ clouds: username: stack password: stack project_name: stack +ansible: + use_hostnames: True + expand_hostvars: False From 6312e38133e79674910b2cb8c1b1aa695c6816fc Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Mon, 14 Dec 2015 10:35:38 -0500 Subject: [PATCH 0131/1113] Fixing up some non-py3 things for unit tests --- lib/ansible/executor/task_queue_manager.py | 2 +- lib/ansible/module_utils/known_hosts.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/executor/task_queue_manager.py b/lib/ansible/executor/task_queue_manager.py index 9189ab95819..dae70a12925 100644 --- a/lib/ansible/executor/task_queue_manager.py +++ b/lib/ansible/executor/task_queue_manager.py @@ -99,7 +99,7 @@ class TaskQueueManager: def _initialize_processes(self, num): self._workers = [] - for i in xrange(num): + for i in range(num): main_q = multiprocessing.Queue() rslt_q = multiprocessing.Queue() self._workers.append([None, main_q, rslt_q]) diff --git a/lib/ansible/module_utils/known_hosts.py b/lib/ansible/module_utils/known_hosts.py index d2644d97666..2824836650a 100644 --- a/lib/ansible/module_utils/known_hosts.py +++ b/lib/ansible/module_utils/known_hosts.py @@ -169,7 +169,7 @@ def add_host_key(module, fqdn, key_type="rsa", create_dir=False): if not os.path.exists(user_ssh_dir): if create_dir: try: - os.makedirs(user_ssh_dir, 0700) + os.makedirs(user_ssh_dir, 0o700) except: module.fail_json(msg="failed to create host key directory: %s" % user_ssh_dir) else: From 80d23d639c2351ab6d0951763ca101516f0f2eb7 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Mon, 14 Dec 2015 10:43:30 -0500 Subject: [PATCH 0132/1113] Use an octal representation that works from 2.4->3+ for known_hosts --- lib/ansible/module_utils/known_hosts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/module_utils/known_hosts.py b/lib/ansible/module_utils/known_hosts.py index 2824836650a..9b6af2a28e9 100644 --- a/lib/ansible/module_utils/known_hosts.py +++ b/lib/ansible/module_utils/known_hosts.py @@ -169,7 +169,7 @@ def add_host_key(module, fqdn, key_type="rsa", create_dir=False): if not os.path.exists(user_ssh_dir): if create_dir: try: - os.makedirs(user_ssh_dir, 0o700) + os.makedirs(user_ssh_dir, int('700', 8)) except: module.fail_json(msg="failed to create host key directory: %s" % user_ssh_dir) else: From c9eb41109f83358d8d968457728996f60b30b933 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Mon, 14 Dec 2015 08:03:56 -0800 Subject: [PATCH 0133/1113] Update submodule refs --- lib/ansible/modules/core | 2 
+- lib/ansible/modules/extras | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core index 0d23b3df526..e6b7b17326b 160000 --- a/lib/ansible/modules/core +++ b/lib/ansible/modules/core @@ -1 +1 @@ -Subproject commit 0d23b3df526875c8fc6edf94268f3aa850ec05f1 +Subproject commit e6b7b17326b4c9d11501112270c52ae25955938a diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras index 51813e00333..f3251de29cb 160000 --- a/lib/ansible/modules/extras +++ b/lib/ansible/modules/extras @@ -1 +1 @@ -Subproject commit 51813e003331c3341b07c5cda33346cada537a3b +Subproject commit f3251de29cb10664b2c63a0021530c3fe34111a3 From 457f86f61a3bef95b562dbf91b523c563bff2f63 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Mon, 14 Dec 2015 08:50:37 -0800 Subject: [PATCH 0134/1113] Minor: Correct type pyhton => python --- test/integration/roles/test_docker/tasks/main.yml | 2 +- test/units/plugins/cache/test_cache.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/test/integration/roles/test_docker/tasks/main.yml b/test/integration/roles/test_docker/tasks/main.yml index 2ea15644d5f..76b3fa70702 100644 --- a/test/integration/roles/test_docker/tasks/main.yml +++ b/test/integration/roles/test_docker/tasks/main.yml @@ -3,7 +3,7 @@ #- include: docker-setup-rht.yml # Packages on RHEL and CentOS 7 are broken, broken, broken. Revisit when # they've got that sorted out - # CentOS 6 currently broken by conflicting files in pyhton-backports and python-backports-ssl_match_hostname + # CentOS 6 currently broken by conflicting files in python-backports and python-backports-ssl_match_hostname #when: ansible_distribution in ['RedHat', 'CentOS'] and ansible_lsb.major_release|int == 6 # python-docker isn't available until 14.10. 
Revist at the next Ubuntu LTS diff --git a/test/units/plugins/cache/test_cache.py b/test/units/plugins/cache/test_cache.py index af1d924910d..0547ba55bf0 100644 --- a/test/units/plugins/cache/test_cache.py +++ b/test/units/plugins/cache/test_cache.py @@ -110,6 +110,6 @@ class TestAbstractClass(unittest.TestCase): def test_memory_cachemodule(self): self.assertIsInstance(MemoryCache(), MemoryCache) - @unittest.skipUnless(HAVE_REDIS, 'Redis pyhton module not installed') + @unittest.skipUnless(HAVE_REDIS, 'Redis python module not installed') def test_redis_cachemodule(self): self.assertIsInstance(RedisCache(), RedisCache) From e595c501976d5f378414dec90543151d7319253b Mon Sep 17 00:00:00 2001 From: gp Date: Mon, 14 Dec 2015 12:06:35 -0500 Subject: [PATCH 0135/1113] Fix typo in galaxy.rst Fix typo --- docsite/rst/galaxy.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docsite/rst/galaxy.rst b/docsite/rst/galaxy.rst index 783ac15e456..c9dea273367 100644 --- a/docsite/rst/galaxy.rst +++ b/docsite/rst/galaxy.rst @@ -126,7 +126,7 @@ The above will create the following directory structure in the current working d :: README.md - .travsis.yml + .travis.yml defaults/ main.yml files/ From a7ac98262d94cc24a584b8e163cebc0a2a492cd6 Mon Sep 17 00:00:00 2001 From: Michael Scherer Date: Sat, 12 Dec 2015 20:18:36 +0100 Subject: [PATCH 0136/1113] Make module_utils.known_hosts.get_fqdn work on ipv6 --- lib/ansible/module_utils/known_hosts.py | 16 +++++++++------- .../units/module_utils/basic/test_known_hosts.py | 8 ++++++++ 2 files changed, 17 insertions(+), 7 deletions(-) diff --git a/lib/ansible/module_utils/known_hosts.py b/lib/ansible/module_utils/known_hosts.py index 9b6af2a28e9..64ad0c76c2b 100644 --- a/lib/ansible/module_utils/known_hosts.py +++ b/lib/ansible/module_utils/known_hosts.py @@ -74,12 +74,12 @@ def get_fqdn(repo_url): if "@" in repo_url and "://" not in repo_url: # most likely an user@host:path or user@host/path type URL repo_url = repo_url.split("@", 1)[1] - if ":" in repo_url: - repo_url = repo_url.split(":")[0] - result = repo_url + if repo_url.startswith('['): + result = repo_url.split(']', 1)[0] + ']' + elif ":" in repo_url: + result = repo_url.split(":")[0] elif "/" in repo_url: - repo_url = repo_url.split("/")[0] - result = repo_url + result = repo_url.split("/")[0] elif "://" in repo_url: # this should be something we can parse with urlparse parts = urlparse.urlparse(repo_url) @@ -87,11 +87,13 @@ def get_fqdn(repo_url): # ensure we actually have a parts[1] before continuing. 
if parts[1] != '': result = parts[1] - if ":" in result: - result = result.split(":")[0] if "@" in result: result = result.split("@", 1)[1] + if result[0].startswith('['): + result = result.split(']', 1)[0] + ']' + elif ":" in result: + result = result.split(":")[0] return result def check_hostkey(module, fqdn): diff --git a/test/units/module_utils/basic/test_known_hosts.py b/test/units/module_utils/basic/test_known_hosts.py index 952184bfec9..515d67686de 100644 --- a/test/units/module_utils/basic/test_known_hosts.py +++ b/test/units/module_utils/basic/test_known_hosts.py @@ -33,6 +33,14 @@ class TestAnsibleModuleKnownHosts(unittest.TestCase): {'is_ssh_url': True, 'get_fqdn': 'five.example.org'}, 'ssh://six.example.org:21/example.org': {'is_ssh_url': True, 'get_fqdn': 'six.example.org'}, + 'ssh://[2001:DB8::abcd:abcd]/example.git': + {'is_ssh_url': True, 'get_fqdn': '[2001:DB8::abcd:abcd]'}, + 'ssh://[2001:DB8::abcd:abcd]:22/example.git': + {'is_ssh_url': True, 'get_fqdn': '[2001:DB8::abcd:abcd]'}, + 'username@[2001:DB8::abcd:abcd]/example.git': + {'is_ssh_url': True, 'get_fqdn': '[2001:DB8::abcd:abcd]'}, + 'username@[2001:DB8::abcd:abcd]:22/example.git': + {'is_ssh_url': True, 'get_fqdn': '[2001:DB8::abcd:abcd]'}, } def test_is_ssh_url(self): From 8d16638fec3e88e0f7b0dde24aae095100436644 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Mon, 14 Dec 2015 10:54:10 -0800 Subject: [PATCH 0137/1113] Fix for template module not creating a file that was not present when force=false --- lib/ansible/plugins/action/template.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/plugins/action/template.py b/lib/ansible/plugins/action/template.py index 109f3e80c0b..d134f80a8df 100644 --- a/lib/ansible/plugins/action/template.py +++ b/lib/ansible/plugins/action/template.py @@ -150,7 +150,7 @@ class ActionModule(ActionBase): diff = {} new_module_args = self._task.args.copy() - if force and local_checksum != remote_checksum: + if (remote_checksum == '1') or (force and local_checksum != remote_checksum): result['changed'] = True # if showing diffs, we need to get the remote value From 73160e65e5708a506db2682348bf69d9ea97d3b9 Mon Sep 17 00:00:00 2001 From: Krzysztof Jurewicz Date: Tue, 15 Dec 2015 12:03:50 +0100 Subject: [PATCH 0138/1113] Use wrapped connect_to_region everywhere in ec2.py --- contrib/inventory/ec2.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/contrib/inventory/ec2.py b/contrib/inventory/ec2.py index 4c5cf23fcb8..ff13aa9d05d 100755 --- a/contrib/inventory/ec2.py +++ b/contrib/inventory/ec2.py @@ -511,7 +511,7 @@ class Ec2Inventory(object): # that's why we need to call describe directly (it would be called by # the shorthand method anyway...) try: - conn = elasticache.connect_to_region(region) + conn = self.connect_to_aws(elasticache, region) if conn: # show_cache_node_info = True # because we also want nodes' information @@ -547,7 +547,7 @@ class Ec2Inventory(object): # that's why we need to call describe directly (it would be called by # the shorthand method anyway...) try: - conn = elasticache.connect_to_region(region) + conn = self.connect_to_aws(elasticache, region) if conn: response = conn.describe_replication_groups() From 27cd7668c152c5b2b74a10ffe78bfca7a11aeaac Mon Sep 17 00:00:00 2001 From: Peter Sprygada Date: Tue, 8 Dec 2015 07:34:09 -0500 Subject: [PATCH 0139/1113] the ssh shared module will try to use keys if the password is not supplied The current ssh shared module forces only password based authentication. 
This change will allow the ssh module to use keys if a password is not provided. --- lib/ansible/module_utils/ssh.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/lib/ansible/module_utils/ssh.py b/lib/ansible/module_utils/ssh.py index 343f017a988..00922ef8cdd 100644 --- a/lib/ansible/module_utils/ssh.py +++ b/lib/ansible/module_utils/ssh.py @@ -91,12 +91,17 @@ class Ssh(object): def __init__(self): self.client = None - def open(self, host, port=22, username=None, password=None, timeout=10): + def open(self, host, port=22, username=None, password=None, + timeout=10, key_filename=None): + ssh = paramiko.SSHClient() ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + use_keys = password is None + ssh.connect(host, port=port, username=username, password=password, - timeout=timeout, allow_agent=False, look_for_keys=False) + timeout=timeout, allow_agent=use_keys, look_for_keys=use_keys, + key_filename=key_filename) self.client = ssh return self.on_open() From be4d1f9ee380705768574baefb75830e3c76afa2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Yannig=20Perr=C3=A9?= Date: Tue, 15 Dec 2015 12:49:20 +0100 Subject: [PATCH 0140/1113] Fix a part of python 3 tests (make tests-py3, see https://github.com/ansible/ansible/issues/13553 for more details). --- lib/ansible/module_utils/known_hosts.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/lib/ansible/module_utils/known_hosts.py b/lib/ansible/module_utils/known_hosts.py index 64ad0c76c2b..52b0bb74b0f 100644 --- a/lib/ansible/module_utils/known_hosts.py +++ b/lib/ansible/module_utils/known_hosts.py @@ -28,7 +28,11 @@ import os import hmac -import urlparse + +try: + import urlparse +except ImportError: + import urllib.parse as urlparse try: from hashlib import sha1 From a0842781a6a77a0e51ad411ab186395379cc4dcb Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Tue, 15 Dec 2015 08:44:43 -0500 Subject: [PATCH 0141/1113] renamed ssh.py shared module file to clarify --- lib/ansible/module_utils/{ssh.py => issh.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename lib/ansible/module_utils/{ssh.py => issh.py} (100%) diff --git a/lib/ansible/module_utils/ssh.py b/lib/ansible/module_utils/issh.py similarity index 100% rename from lib/ansible/module_utils/ssh.py rename to lib/ansible/module_utils/issh.py From be5488cb60869c67b0ea521a4044062157817e50 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Tue, 15 Dec 2015 09:27:53 -0500 Subject: [PATCH 0142/1113] clean debug output to match prev versions --- lib/ansible/plugins/callback/__init__.py | 6 ++++++ lib/ansible/plugins/callback/default.py | 1 + lib/ansible/plugins/callback/minimal.py | 1 + 3 files changed, 8 insertions(+) diff --git a/lib/ansible/plugins/callback/__init__.py b/lib/ansible/plugins/callback/__init__.py index b8a48943f28..7371fe0a51e 100644 --- a/lib/ansible/plugins/callback/__init__.py +++ b/lib/ansible/plugins/callback/__init__.py @@ -140,6 +140,12 @@ class CallbackBase: else: self.v2_playbook_item_on_ok(newres) + def _clean_results(self, result, task_name): + if 'changed' in result and task_name in ['debug']: + del result['changed'] + if 'invocation' in result and task_name in ['debug']: + del result['invocation'] + def set_play_context(self, play_context): pass diff --git a/lib/ansible/plugins/callback/default.py b/lib/ansible/plugins/callback/default.py index 1f37f4b975e..e515945bba5 100644 --- a/lib/ansible/plugins/callback/default.py +++ b/lib/ansible/plugins/callback/default.py @@ -62,6 +62,7 @@ class 
CallbackModule(CallbackBase): def v2_runner_on_ok(self, result): + self._clean_results(result._result, result._task.action) delegated_vars = result._result.get('_ansible_delegated_vars', None) if result._task.action == 'include': return diff --git a/lib/ansible/plugins/callback/minimal.py b/lib/ansible/plugins/callback/minimal.py index f855c1a6e53..71f9f5dfeef 100644 --- a/lib/ansible/plugins/callback/minimal.py +++ b/lib/ansible/plugins/callback/minimal.py @@ -64,6 +64,7 @@ class CallbackModule(CallbackBase): self._display.display("%s | FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result, indent=4)), color='red') def v2_runner_on_ok(self, result): + self._clean_results(result._result, result._task.action) if result._task.action in C.MODULE_NO_JSON: self._display.display(self._command_generic_msg(result._host.get_name(), result._result, "SUCCESS"), color='green') else: From fcc9258b743d2f596628f28dd4cdc01f0f8d306e Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Wed, 16 Dec 2015 01:48:22 -0500 Subject: [PATCH 0143/1113] Use the original host rather than the serialized one when processing results Fixes #13526 Fixes #13564 Fixes #13566 --- lib/ansible/plugins/strategy/__init__.py | 25 +++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/lib/ansible/plugins/strategy/__init__.py b/lib/ansible/plugins/strategy/__init__.py index 4047bde73a2..d2d79d036bd 100644 --- a/lib/ansible/plugins/strategy/__init__.py +++ b/lib/ansible/plugins/strategy/__init__.py @@ -185,10 +185,20 @@ class StrategyBase: result = self._final_q.get() display.debug("got result from result worker: %s" % ([text_type(x) for x in result],)) + # helper method, used to find the original host from the one + # returned in the result/message, which has been serialized and + # thus had some information stripped from it to speed up the + # serialization process + def get_original_host(host): + if host.name in self._inventory._hosts_cache: + return self._inventory._hosts_cache[host.name] + else: + return self._inventory.get_host(host.name) + # all host status messages contain 2 entries: (msg, task_result) if result[0] in ('host_task_ok', 'host_task_failed', 'host_task_skipped', 'host_unreachable'): task_result = result[1] - host = task_result._host + host = get_original_host(task_result._host) task = task_result._task if result[0] == 'host_task_failed' or task_result.is_failed(): if not task.ignore_errors: @@ -244,7 +254,7 @@ class StrategyBase: self._add_host(new_host_info, iterator) elif result[0] == 'add_group': - host = result[1] + host = get_original_host(result[1]) result_item = result[2] self._add_group(host, result_item) @@ -252,19 +262,20 @@ class StrategyBase: task_result = result[1] handler_name = result[2] - original_task = iterator.get_original_task(task_result._host, task_result._task) + original_host = get_original_host(task_result._host) + original_task = iterator.get_original_task(original_host, task_result._task) if handler_name not in self._notified_handlers: self._notified_handlers[handler_name] = [] - if task_result._host not in self._notified_handlers[handler_name]: - self._notified_handlers[handler_name].append(task_result._host) + if original_host not in self._notified_handlers[handler_name]: + self._notified_handlers[handler_name].append(original_host) display.vv("NOTIFIED HANDLER %s" % (handler_name,)) elif result[0] == 'register_host_var': # essentially the same as 'set_host_var' below, however we # never follow the delegate_to value for registered 
vars and # the variable goes in the fact_cache - host = result[1] + host = get_original_host(result[1]) task = result[2] var_value = wrap_var(result[3]) var_name = task.register @@ -278,7 +289,7 @@ class StrategyBase: self._variable_manager.set_nonpersistent_facts(target_host, {var_name: var_value}) elif result[0] in ('set_host_var', 'set_host_facts'): - host = result[1] + host = get_original_host(result[1]) task = result[2] item = result[3] From 9942d71d345cf221dbcdb19f362d80430d995905 Mon Sep 17 00:00:00 2001 From: Matt Clay Date: Wed, 16 Dec 2015 01:37:02 -0800 Subject: [PATCH 0144/1113] Test for filename option in apt_repository module. --- .../roles/test_apt_repository/tasks/apt.yml | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/test/integration/roles/test_apt_repository/tasks/apt.yml b/test/integration/roles/test_apt_repository/tasks/apt.yml index 49d13bc52a3..9c8e3ab4473 100644 --- a/test/integration/roles/test_apt_repository/tasks/apt.yml +++ b/test/integration/roles/test_apt_repository/tasks/apt.yml @@ -2,6 +2,7 @@ - set_fact: test_ppa_name: 'ppa:menulibre-dev/devel' + test_ppa_filename: 'menulibre-dev' test_ppa_spec: 'deb http://ppa.launchpad.net/menulibre-dev/devel/ubuntu {{ansible_distribution_release}} main' test_ppa_key: 'A7AD98A1' # http://keyserver.ubuntu.com:11371/pks/lookup?search=0xD06AAF4C11DAB86DF421421EFE6B20ECA7AD98A1&op=index @@ -144,6 +145,47 @@ - name: 'ensure ppa key is absent (expect: pass)' apt_key: id='{{test_ppa_key}}' state=absent +# +# TEST: apt_repository: repo= filename= +# +- include: 'cleanup.yml' + +- name: 'record apt cache mtime' + stat: path='/var/cache/apt/pkgcache.bin' + register: cache_before + +- name: 'name= filename= (expect: pass)' + apt_repository: repo='{{test_ppa_spec}}' filename='{{test_ppa_filename}}' state=present + register: result + +- assert: + that: + - 'result.changed' + - 'result.state == "present"' + - 'result.repo == "{{test_ppa_spec}}"' + +- name: 'examine source file' + stat: path='/etc/apt/sources.list.d/{{test_ppa_filename}}.list' + register: source_file + +- name: 'assert source file exists' + assert: + that: + - 'source_file.stat.exists == True' + +- name: 'examine apt cache mtime' + stat: path='/var/cache/apt/pkgcache.bin' + register: cache_after + +- name: 'assert the apt cache did change' + assert: + that: + - 'cache_before.stat.mtime != cache_after.stat.mtime' + +# When installing a repo with the spec, the key is *NOT* added +- name: 'ensure ppa key is absent (expect: pass)' + apt_key: id='{{test_ppa_key}}' state=absent + # # TEARDOWN # From 63b624707d0bcb057cec7c81d86b511106cba512 Mon Sep 17 00:00:00 2001 From: David Date: Wed, 16 Dec 2015 23:46:06 +0800 Subject: [PATCH 0145/1113] Fix typo --- docsite/rst/playbooks_roles.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docsite/rst/playbooks_roles.rst b/docsite/rst/playbooks_roles.rst index 516403ac805..c6c01db5d48 100644 --- a/docsite/rst/playbooks_roles.rst +++ b/docsite/rst/playbooks_roles.rst @@ -132,7 +132,7 @@ Note that you cannot do variable substitution when including one playbook inside another. .. note:: - You can not conditionally path the location to an include file, + You can not conditionally pass the location to an include file, like you can with 'vars_files'. If you find yourself needing to do this, consider how you can restructure your playbook to be more class/role oriented. 
This is to say you cannot use a 'fact' to From 73ead4fbbadb8ad874f95f0dd542256b2ad730aa Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Mon, 14 Dec 2015 20:05:55 -0800 Subject: [PATCH 0146/1113] First attempt to fix https certificate errors through a proxy with python-2.7.9+ Fixes #12549 --- lib/ansible/module_utils/urls.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/lib/ansible/module_utils/urls.py b/lib/ansible/module_utils/urls.py index 979d5943dde..0f45c360349 100644 --- a/lib/ansible/module_utils/urls.py +++ b/lib/ansible/module_utils/urls.py @@ -326,11 +326,15 @@ class CustomHTTPSConnection(httplib.HTTPSConnection): sock = socket.create_connection((self.host, self.port), self.timeout, self.source_address) else: sock = socket.create_connection((self.host, self.port), self.timeout) + + server_hostname = self.host if self._tunnel_host: self.sock = sock self._tunnel() + server_hostname = self._tunnel_host + if HAS_SSLCONTEXT: - self.sock = self.context.wrap_socket(sock, server_hostname=self.host) + self.sock = self.context.wrap_socket(sock, server_hostname=server_hostname) else: self.sock = ssl.wrap_socket(sock, keyfile=self.key_file, certfile=self.cert_file, ssl_version=PROTOCOL) @@ -542,7 +546,7 @@ class SSLValidationHandler(urllib2.BaseHandler): connect_result = s.recv(4096) self.validate_proxy_response(connect_result) if context: - ssl_s = context.wrap_socket(s, server_hostname=proxy_parts.get('hostname')) + ssl_s = context.wrap_socket(s, server_hostname=self.hostname) else: ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL) match_hostname(ssl_s.getpeercert(), self.hostname) From 72a0654b81aec47e9fa989ba8c1d50a55a093f6f Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Tue, 15 Dec 2015 15:35:13 -0800 Subject: [PATCH 0147/1113] Fixes for proxy on RHEL5 --- lib/ansible/module_utils/urls.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/lib/ansible/module_utils/urls.py b/lib/ansible/module_utils/urls.py index 0f45c360349..d0ee260e17f 100644 --- a/lib/ansible/module_utils/urls.py +++ b/lib/ansible/module_utils/urls.py @@ -328,6 +328,8 @@ class CustomHTTPSConnection(httplib.HTTPSConnection): sock = socket.create_connection((self.host, self.port), self.timeout) server_hostname = self.host + # Note: self._tunnel_host is not available on py < 2.6 but this code + # isn't used on py < 2.6 (lack of create_connection) if self._tunnel_host: self.sock = sock self._tunnel() @@ -377,7 +379,10 @@ def generic_urlparse(parts): # get the username, password, etc. 
try: netloc_re = re.compile(r'^((?:\w)+(?::(?:\w)+)?@)?([A-Za-z0-9.-]+)(:\d+)?$') - (auth, hostname, port) = netloc_re.match(parts[1]) + match = netloc_re.match(parts[1]) + auth = match.group(1) + hostname = match.group(2) + port = match.group(3) if port: # the capture group for the port will include the ':', # so remove it and convert the port to an integer @@ -387,6 +392,8 @@ def generic_urlparse(parts): # and then split it up based on the first ':' found auth = auth[:-1] username, password = auth.split(':', 1) + else: + username = password = None generic_parts['username'] = username generic_parts['password'] = password generic_parts['hostname'] = hostname @@ -394,7 +401,7 @@ def generic_urlparse(parts): except: generic_parts['username'] = None generic_parts['password'] = None - generic_parts['hostname'] = None + generic_parts['hostname'] = parts[1] generic_parts['port'] = None return generic_parts @@ -536,7 +543,8 @@ class SSLValidationHandler(urllib2.BaseHandler): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if https_proxy: proxy_parts = generic_urlparse(urlparse.urlparse(https_proxy)) - s.connect((proxy_parts.get('hostname'), proxy_parts.get('port'))) + port = proxy_parts.get('port') or 443 + s.connect((proxy_parts.get('hostname'), port)) if proxy_parts.get('scheme') == 'http': s.sendall(self.CONNECT_COMMAND % (self.hostname, self.port)) if proxy_parts.get('username'): From 33863eb653f3ed4d6f30ab816743443f473c5eae Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Wed, 16 Dec 2015 07:38:51 -0800 Subject: [PATCH 0148/1113] Conditionally create the CustomHTTPSConnection class only if we have the required baseclasses. Fixes #11918 --- lib/ansible/module_utils/urls.py | 74 +++++++++++++++++--------------- 1 file changed, 39 insertions(+), 35 deletions(-) diff --git a/lib/ansible/module_utils/urls.py b/lib/ansible/module_utils/urls.py index d0ee260e17f..41613f6cb61 100644 --- a/lib/ansible/module_utils/urls.py +++ b/lib/ansible/module_utils/urls.py @@ -310,42 +310,45 @@ class NoSSLError(SSLValidationError): """Needed to connect to an HTTPS url but no ssl library available to verify the certificate""" pass +# Some environments (Google Compute Engine's CoreOS deploys) do not compile +# against openssl and thus do not have any HTTPS support. +CustomHTTPSConnection = CustomHTTPSHandler = None +if hasattr(httplib, 'HTTPSConnection') and hasattr(urllib2, 'HTTPSHandler'): + class CustomHTTPSConnection(httplib.HTTPSConnection): + def __init__(self, *args, **kwargs): + httplib.HTTPSConnection.__init__(self, *args, **kwargs) + if HAS_SSLCONTEXT: + self.context = create_default_context() + if self.cert_file: + self.context.load_cert_chain(self.cert_file, self.key_file) + + def connect(self): + "Connect to a host on a given (SSL) port." 
+ + if hasattr(self, 'source_address'): + sock = socket.create_connection((self.host, self.port), self.timeout, self.source_address) + else: + sock = socket.create_connection((self.host, self.port), self.timeout) + + server_hostname = self.host + # Note: self._tunnel_host is not available on py < 2.6 but this code + # isn't used on py < 2.6 (lack of create_connection) + if self._tunnel_host: + self.sock = sock + self._tunnel() + server_hostname = self._tunnel_host + + if HAS_SSLCONTEXT: + self.sock = self.context.wrap_socket(sock, server_hostname=server_hostname) + else: + self.sock = ssl.wrap_socket(sock, keyfile=self.key_file, certfile=self.cert_file, ssl_version=PROTOCOL) -class CustomHTTPSConnection(httplib.HTTPSConnection): - def __init__(self, *args, **kwargs): - httplib.HTTPSConnection.__init__(self, *args, **kwargs) - if HAS_SSLCONTEXT: - self.context = create_default_context() - if self.cert_file: - self.context.load_cert_chain(self.cert_file, self.key_file) - - def connect(self): - "Connect to a host on a given (SSL) port." - - if hasattr(self, 'source_address'): - sock = socket.create_connection((self.host, self.port), self.timeout, self.source_address) - else: - sock = socket.create_connection((self.host, self.port), self.timeout) - - server_hostname = self.host - # Note: self._tunnel_host is not available on py < 2.6 but this code - # isn't used on py < 2.6 (lack of create_connection) - if self._tunnel_host: - self.sock = sock - self._tunnel() - server_hostname = self._tunnel_host - - if HAS_SSLCONTEXT: - self.sock = self.context.wrap_socket(sock, server_hostname=server_hostname) - else: - self.sock = ssl.wrap_socket(sock, keyfile=self.key_file, certfile=self.cert_file, ssl_version=PROTOCOL) - -class CustomHTTPSHandler(urllib2.HTTPSHandler): + class CustomHTTPSHandler(urllib2.HTTPSHandler): - def https_open(self, req): - return self.do_open(CustomHTTPSConnection, req) + def https_open(self, req): + return self.do_open(CustomHTTPSConnection, req) - https_request = urllib2.AbstractHTTPHandler.do_request_ + https_request = urllib2.AbstractHTTPHandler.do_request_ def generic_urlparse(parts): ''' @@ -673,8 +676,9 @@ def open_url(url, data=None, headers=None, method=None, use_proxy=True, handlers.append(proxyhandler) # pre-2.6 versions of python cannot use the custom https - # handler, since the socket class is lacking this method - if hasattr(socket, 'create_connection'): + # handler, since the socket class is lacking create_connection. + # Some python builds lack HTTPS support. 
+ if hasattr(socket, 'create_connection') and CustomHTTPSHandler: handlers.append(CustomHTTPSHandler) opener = urllib2.build_opener(*handlers) From 0095d04af9712c0c026b29e45dbe57a70e30f1e0 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Wed, 16 Dec 2015 08:02:46 -0800 Subject: [PATCH 0149/1113] Update submodule refs --- lib/ansible/modules/core | 2 +- lib/ansible/modules/extras | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core index e6b7b17326b..50e7bff5546 160000 --- a/lib/ansible/modules/core +++ b/lib/ansible/modules/core @@ -1 +1 @@ -Subproject commit e6b7b17326b4c9d11501112270c52ae25955938a +Subproject commit 50e7bff554647ccd8a34729171420e72b3a00c61 diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras index f3251de29cb..bde5686552f 160000 --- a/lib/ansible/modules/extras +++ b/lib/ansible/modules/extras @@ -1 +1 @@ -Subproject commit f3251de29cb10664b2c63a0021530c3fe34111a3 +Subproject commit bde5686552fdd88a758c7197b2eebe98b1afbf07 From 6a252a3f7727649c61c007e73f04201fd6fbdfa8 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Wed, 16 Dec 2015 11:21:19 -0500 Subject: [PATCH 0150/1113] Preserve the cumulative path for checking includes which have parents Otherwise, each relative include path is checked on its own, rather than in relation to the (possibly relative) path of its parent, meaning includes multiple level deep may fail to find the correct (or any) file. Fixes #13472 --- lib/ansible/playbook/included_file.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/lib/ansible/playbook/included_file.py b/lib/ansible/playbook/included_file.py index b7c0fb81756..7fb851a12af 100644 --- a/lib/ansible/playbook/included_file.py +++ b/lib/ansible/playbook/included_file.py @@ -81,14 +81,19 @@ class IncludedFile: # handle relative includes by walking up the list of parent include # tasks and checking the relative result to see if it exists parent_include = original_task._task_include + cumulative_path = None while parent_include is not None: parent_include_dir = templar.template(os.path.dirname(parent_include.args.get('_raw_params'))) + if cumulative_path is None: + cumulative_path = parent_include_dir + elif not os.path.isabs(cumulative_path): + cumulative_path = os.path.join(parent_include_dir, cumulative_path) include_target = templar.template(include_result['include']) if original_task._role: - new_basedir = os.path.join(original_task._role._role_path, 'tasks', parent_include_dir) + new_basedir = os.path.join(original_task._role._role_path, 'tasks', cumulative_path) include_file = loader.path_dwim_relative(new_basedir, 'tasks', include_target) else: - include_file = loader.path_dwim_relative(loader.get_basedir(), parent_include_dir, include_target) + include_file = loader.path_dwim_relative(loader.get_basedir(), cumulative_path, include_target) if os.path.exists(include_file): break From 375eb501b3b1edf7fd91807374edfcd60ca736b8 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Wed, 16 Dec 2015 09:40:01 -0800 Subject: [PATCH 0151/1113] Update url to site that has an invalid certificate --- test/integration/roles/test_get_url/tasks/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/integration/roles/test_get_url/tasks/main.yml b/test/integration/roles/test_get_url/tasks/main.yml index 6e3842f6abf..09ee34277a0 100644 --- a/test/integration/roles/test_get_url/tasks/main.yml +++ b/test/integration/roles/test_get_url/tasks/main.yml @@ -28,7 
+28,7 @@ - name: test https fetch to a site with mismatched hostname and certificate get_url: - url: "https://kennethreitz.org/" + url: "https://www.kennethreitz.org/" dest: "{{ output_dir }}/shouldnotexist.html" ignore_errors: True register: result @@ -46,7 +46,7 @@ - name: test https fetch to a site with mismatched hostname and certificate and validate_certs=no get_url: - url: "https://kennethreitz.org/" + url: "https://www.kennethreitz.org/" dest: "{{ output_dir }}/kreitz.html" validate_certs: no register: result From 34e88e48a567d52e3ed0c3ecb6a5aa578e53dd19 Mon Sep 17 00:00:00 2001 From: Jonathan Mainguy Date: Mon, 16 Nov 2015 22:08:15 -0500 Subject: [PATCH 0152/1113] Add shared connection code for mysql modules --- lib/ansible/module_utils/mysql.py | 66 +++++++++++++++ .../utils/module_docs_fragments/mysql.py | 84 +++++++++++++++++++ .../tasks/user_password_update_test.yml | 1 - .../tasks/assert_fail_msg.yml | 2 - 4 files changed, 150 insertions(+), 3 deletions(-) create mode 100644 lib/ansible/module_utils/mysql.py create mode 100644 lib/ansible/utils/module_docs_fragments/mysql.py diff --git a/lib/ansible/module_utils/mysql.py b/lib/ansible/module_utils/mysql.py new file mode 100644 index 00000000000..48e00adfd9c --- /dev/null +++ b/lib/ansible/module_utils/mysql.py @@ -0,0 +1,66 @@ +# This code is part of Ansible, but is an independent component. +# This particular file snippet, and this file snippet only, is BSD licensed. +# Modules you write using this snippet, which is embedded dynamically by Ansible +# still belong to the author of the module, and may assign their own license +# to the complete work. +# +# Copyright (c), Jonathan Mainguy , 2015 +# Most of this was originally added by Sven Schliesing @muffl0n in the mysql_user.py module +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + + +def mysql_connect(module, login_user=None, login_password=None, config_file='', ssl_cert=None, ssl_key=None, ssl_ca=None, db=None, cursor_class=None): + config = { + 'host': module.params['login_host'], + 'ssl': { + } + } + + if module.params['login_unix_socket']: + config['unix_socket'] = module.params['login_unix_socket'] + else: + config['port'] = module.params['login_port'] + + if os.path.exists(config_file): + config['read_default_file'] = config_file + + # If login_user or login_password are given, they should override the + # config file + if login_user is not None: + config['user'] = login_user + if login_password is not None: + config['passwd'] = login_password + if ssl_cert is not None: + config['ssl']['cert'] = ssl_cert + if ssl_key is not None: + config['ssl']['key'] = ssl_key + if ssl_ca is not None: + config['ssl']['ca'] = ssl_ca + if db is not None: + config['db'] = db + + db_connection = MySQLdb.connect(**config) + if cursor_class is not None: + return db_connection.cursor(cursorclass=MySQLdb.cursors.DictCursor) + else: + return db_connection.cursor() diff --git a/lib/ansible/utils/module_docs_fragments/mysql.py b/lib/ansible/utils/module_docs_fragments/mysql.py new file mode 100644 index 00000000000..5dd1e04f93b --- /dev/null +++ b/lib/ansible/utils/module_docs_fragments/mysql.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2015 Jonathan Mainguy +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see . + + +class ModuleDocFragment(object): + + # Standard mysql documentation fragment + DOCUMENTATION = ''' +options: + login_user: + description: + - The username used to authenticate with + required: false + default: null + login_password: + description: + - The password used to authenticate with + required: false + default: null + login_host: + description: + - Host running the database + required: false + default: localhost + login_port: + description: + - Port of the MySQL server. Requires login_host be defined as other then localhost if login_port is used + required: false + default: 3306 + login_unix_socket: + description: + - The path to a Unix domain socket for local connections + required: false + default: null + config_file: + description: + - Specify a config file from which user and password are to be read + required: false + default: '~/.my.cnf' + version_added: "2.0" + ssl_ca: + required: false + default: null + version_added: "2.0" + description: + - The path to a Certificate Authority (CA) certificate. This option, if used, must specify the same certificate as used by the server. + ssl_cert: + required: false + default: null + version_added: "2.0" + description: + - The path to a client public key certificate. + ssl_key: + required: false + default: null + version_added: "2.0" + description: + - The path to the client private key. +requirements: + - MySQLdb +notes: + - Requires the MySQLdb Python package on the remote host. 
For Ubuntu, this + is as easy as apt-get install python-mysqldb. (See M(apt).) For CentOS/Fedora, this + is as easy as yum install MySQL-python. (See M(yum).) + - Both C(login_password) and C(login_user) are required when you are + passing credentials. If none are present, the module will attempt to read + the credentials from C(~/.my.cnf), and finally fall back to using the MySQL + default login of 'root' with no password. +''' diff --git a/test/integration/roles/test_mysql_user/tasks/user_password_update_test.yml b/test/integration/roles/test_mysql_user/tasks/user_password_update_test.yml index 50307cef956..904165c33ec 100644 --- a/test/integration/roles/test_mysql_user/tasks/user_password_update_test.yml +++ b/test/integration/roles/test_mysql_user/tasks/user_password_update_test.yml @@ -63,7 +63,6 @@ assert: that: - "result.failed == true" - - "'check login credentials (login_user, and login_password' in result.msg" - name: create database using user2 and new password mysql_db: name={{ db_name }} state=present login_user={{ user_name_2 }} login_password={{ user_password_1 }} diff --git a/test/integration/roles/test_mysql_variables/tasks/assert_fail_msg.yml b/test/integration/roles/test_mysql_variables/tasks/assert_fail_msg.yml index 70aa26856ed..ba51b9d67cb 100644 --- a/test/integration/roles/test_mysql_variables/tasks/assert_fail_msg.yml +++ b/test/integration/roles/test_mysql_variables/tasks/assert_fail_msg.yml @@ -23,5 +23,3 @@ assert: that: - "output.failed == true" - - "'{{msg}}' in output.msg" - From 851c0058b148ce041af5ca5c9fbdf25ff854cf8f Mon Sep 17 00:00:00 2001 From: Chrrrles Paul Date: Wed, 16 Dec 2015 12:45:05 -0600 Subject: [PATCH 0153/1113] Removing yaml support for path: --- docsite/rst/galaxy.rst | 5 ----- 1 file changed, 5 deletions(-) diff --git a/docsite/rst/galaxy.rst b/docsite/rst/galaxy.rst index c9dea273367..f4ca16cb8f1 100644 --- a/docsite/rst/galaxy.rst +++ b/docsite/rst/galaxy.rst @@ -73,10 +73,6 @@ And here's an example showing some specific version downloads from multiple sour # from GitHub - src: https://github.com/bennojoy/nginx - # from GitHub installing to a relative path - - src: https://github.com/bennojoy/nginx - path: vagrant/roles/ - # from GitHub, overriding the name and specifying a specific tag - src: https://github.com/bennojoy/nginx version: master @@ -98,7 +94,6 @@ And here's an example showing some specific version downloads from multiple sour - src: git@gitlab.company.com:mygroup/ansible-base.git scm: git version: 0.1.0 - path: roles/ As you can see in the above, there are a large amount of controls available to customize where roles can be pulled from, and what to save roles as. 
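The shared lib/ansible/module_utils/mysql.py helper introduced in the mysql patch above centralises connection handling so that each MySQL module only passes credentials and SSL options through. The following is a minimal, hypothetical sketch of how a module might call such a helper; the parameter names mirror the doc fragment above, but the module body and failure handling are illustrative only, not the actual mysql_db or mysql_user code:

    # Illustrative calling pattern for the shared mysql_connect() helper;
    # requires the MySQLdb package on the remote host.
    from ansible.module_utils.basic import AnsibleModule
    from ansible.module_utils.mysql import mysql_connect


    def main():
        module = AnsibleModule(argument_spec=dict(
            login_user=dict(default=None),
            login_password=dict(default=None, no_log=True),
            login_host=dict(default='localhost'),
            login_port=dict(default=3306, type='int'),
            login_unix_socket=dict(default=None),
            config_file=dict(default='~/.my.cnf'),
        ))
        try:
            # explicit login_user/login_password override the config file
            cursor = mysql_connect(module,
                                   module.params['login_user'],
                                   module.params['login_password'],
                                   module.params['config_file'])
        except Exception as e:
            module.fail_json(msg="unable to connect to database, check login_user and "
                                 "login_password or the credentials in %s: %s"
                                 % (module.params['config_file'], e))
        cursor.execute("SELECT VERSION()")
        module.exit_json(changed=False, version=cursor.fetchone()[0])
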
From 6109f703970d741df6e2e28e750667f5d0083fda Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Wed, 16 Dec 2015 13:56:55 -0500 Subject: [PATCH 0154/1113] Attempt at fixing strategy unit test failures on py2.6 and py3 --- test/units/plugins/strategies/test_strategy_base.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/test/units/plugins/strategies/test_strategy_base.py b/test/units/plugins/strategies/test_strategy_base.py index 7cc81a0324e..53e243f926b 100644 --- a/test/units/plugins/strategies/test_strategy_base.py +++ b/test/units/plugins/strategies/test_strategy_base.py @@ -377,9 +377,7 @@ class TestStrategyBase(unittest.TestCase): strategy_base._inventory = mock_inventory strategy_base._notified_handlers = {"test handler": [mock_host]} - mock_return_task = MagicMock(Handler) - mock_return_host = MagicMock(Host) - task_result = TaskResult(mock_return_host, mock_return_task, dict(changed=False)) + task_result = TaskResult(Host('host01'), Handler(), dict(changed=False)) tqm._final_q.put(('host_task_ok', task_result)) result = strategy_base.run_handlers(iterator=mock_iterator, play_context=mock_play_context) From 9724117bbb6c09a4d6d2e1f6573e69db697bdcc7 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Wed, 16 Dec 2015 11:15:39 -0800 Subject: [PATCH 0155/1113] Update submodule refs for mysql refactor --- lib/ansible/modules/core | 2 +- lib/ansible/modules/extras | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core index 50e7bff5546..3c48320b295 160000 --- a/lib/ansible/modules/core +++ b/lib/ansible/modules/core @@ -1 +1 @@ -Subproject commit 50e7bff554647ccd8a34729171420e72b3a00c61 +Subproject commit 3c48320b295c3b4f99caccdc5f173b224109a393 diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras index bde5686552f..8ec4f95ffd6 160000 --- a/lib/ansible/modules/extras +++ b/lib/ansible/modules/extras @@ -1 +1 @@ -Subproject commit bde5686552fdd88a758c7197b2eebe98b1afbf07 +Subproject commit 8ec4f95ffd6d4e837cf0f3dd28649fb09afd0caf From baece499dfb6a8d8556db2b686d4f3c86d1d25b1 Mon Sep 17 00:00:00 2001 From: nitzmahone Date: Wed, 16 Dec 2015 11:47:12 -0800 Subject: [PATCH 0156/1113] fix plugin loading for Windows modules force plugin loader to only consider .py files, since that's the only place docs can live ATM... 
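The fix below makes doc lookups ask the plugin loader specifically for the .py variant of a module, because a Windows module such as win_ping is implemented in win_ping.ps1 while its DOCUMENTATION block can only live in the neighbouring Python file. A small self-contained sketch of that resolution rule (a hypothetical helper, not the real plugin loader API):

    import os

    def find_doc_file(module_name, search_dirs):
        """Return the path of the .py file that can carry a module's docs.

        A Windows module ships its implementation as <name>.ps1, but the
        DOCUMENTATION string can only live in Python source, so doc lookups
        must always request the .py variant rather than the first match.
        """
        for directory in search_dirs:
            candidate = os.path.join(directory, module_name + '.py')
            if os.path.isfile(candidate):
                return candidate
        return None

    # e.g. find_doc_file('win_ping', ['/usr/share/ansible/plugins/modules'])
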
--- lib/ansible/cli/doc.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/ansible/cli/doc.py b/lib/ansible/cli/doc.py index a17164eb50e..265b1c9a3fc 100644 --- a/lib/ansible/cli/doc.py +++ b/lib/ansible/cli/doc.py @@ -90,7 +90,8 @@ class DocCLI(CLI): for module in self.args: try: - filename = module_loader.find_plugin(module) + # if the module lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs + filename = module_loader.find_plugin(module, mod_type='.py') if filename is None: display.warning("module %s not found in %s\n" % (module, DocCLI.print_paths(module_loader))) continue @@ -167,7 +168,8 @@ class DocCLI(CLI): if module in module_docs.BLACKLIST_MODULES: continue - filename = module_loader.find_plugin(module) + # if the module lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs + filename = module_loader.find_plugin(module, mod_type='.py') if filename is None: continue From 491fd754f1cbe1944b0f45690842fd49b5977775 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Wed, 16 Dec 2015 16:35:56 -0500 Subject: [PATCH 0157/1113] Updating the porting guide to note the complex args/bare vars change Related to #13518 --- docsite/rst/porting_guide_2.0.rst | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/docsite/rst/porting_guide_2.0.rst b/docsite/rst/porting_guide_2.0.rst index 8d69ecd4403..543be052bdc 100644 --- a/docsite/rst/porting_guide_2.0.rst +++ b/docsite/rst/porting_guide_2.0.rst @@ -55,6 +55,24 @@ uses key=value escaping which has not changed. The other option is to check for # Output "msg": "Testing some things" +* When specifying complex args as a variable, the variable must use the full jinja2 + variable syntax ('{{var_name}}') - bare variable names there are no longer accepted. + In fact, even specifying args with variables has been deprecated, and will not be + allowed in future versions:: + + --- + - hosts: localhost + connection: local + gather_facts: false + vars: + my_dirs: + - { path: /tmp/3a, state: directory, mode: 0755 } + - { path: /tmp/3b, state: directory, mode: 0700 } + tasks: + - file: + args: "{{item}}" # <- args here uses the full variable syntax + with_items: my_dirs + * porting task includes * More dynamic. Corner-case formats that were not supposed to work now do not, as expected. * variables defined in the yaml dict format https://github.com/ansible/ansible/issues/13324 From 8716bf8021800a18cb8d6cfea3f296ba4f834692 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Wed, 16 Dec 2015 16:32:06 -0500 Subject: [PATCH 0158/1113] All variables in complex args again Also updates the CHANGELOG to note the slight change, where bare variables in args are no longer allowed to be bare variables Fixes #13518 --- CHANGELOG.md | 20 ++++++++++++++++++++ lib/ansible/parsing/mod_args.py | 11 ++++++++++- 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c6319634fb7..005171ec9a9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -82,6 +82,26 @@ newline being stripped you can change your playbook like this: "msg": "Testing some things" ``` +* When specifying complex args as a variable, the variable must use the full jinja2 +variable syntax ('{{var_name}}') - bare variable names there are no longer accepted. 
+In fact, even specifying args with variables has been deprecated, and will not be +allowed in future versions: + + ``` + --- + - hosts: localhost + connection: local + gather_facts: false + vars: + my_dirs: + - { path: /tmp/3a, state: directory, mode: 0755 } + - { path: /tmp/3b, state: directory, mode: 0700 } + tasks: + - file: + args: "{{item}}" + with_items: my_dirs + ``` + ###Plugins * Rewritten dnf module that should be faster and less prone to encountering bugs in cornercases diff --git a/lib/ansible/parsing/mod_args.py b/lib/ansible/parsing/mod_args.py index abc35a415e3..86b2d0d996d 100644 --- a/lib/ansible/parsing/mod_args.py +++ b/lib/ansible/parsing/mod_args.py @@ -137,7 +137,16 @@ class ModuleArgsParser: # than those which may be parsed/normalized next final_args = dict() if additional_args: - final_args.update(additional_args) + if isinstance(additional_args, string_types): + templar = Templar(loader=None) + if templar._contains_vars(additional_args): + final_args['_variable_params'] = additional_args + else: + raise AnsibleParserError("Complex args containing variables cannot use bare variables, and must use the full variable style ('{{var_name}}')") + elif isinstance(additional_args, dict): + final_args.update(additional_args) + else: + raise AnsibleParserError('Complex args must be a dictionary or variable string ("{{var}}").') # how we normalize depends if we figured out what the module name is # yet. If we have already figured it out, it's an 'old style' invocation. From fffd29d1ab15dc93a2854f874695b63e15d5c198 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Wed, 16 Dec 2015 14:06:11 -0800 Subject: [PATCH 0159/1113] Update mysql setup to handle installing mysql with dnf too. --- test/integration/roles/setup_mysql_db/tasks/main.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/test/integration/roles/setup_mysql_db/tasks/main.yml b/test/integration/roles/setup_mysql_db/tasks/main.yml index a8010e71389..612d94f6d11 100644 --- a/test/integration/roles/setup_mysql_db/tasks/main.yml +++ b/test/integration/roles/setup_mysql_db/tasks/main.yml @@ -31,6 +31,11 @@ with_items: mysql_packages when: ansible_pkg_mgr == 'yum' +- name: install mysqldb_test rpm dependencies + dnf: name={{ item }} state=latest + with_items: mysql_packages + when: ansible_pkg_mgr == 'dnf' + - name: install mysqldb_test debian dependencies apt: name={{ item }} state=latest with_items: mysql_packages From fd4ad2c8f24be48e2fa103a6b8feae287c4b57fe Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Wed, 16 Dec 2015 14:08:08 -0800 Subject: [PATCH 0160/1113] Update submodule ref to fix a bug in mysql_user with mariadb --- lib/ansible/modules/core | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core index 3c48320b295..16a3bdaa7da 160000 --- a/lib/ansible/modules/core +++ b/lib/ansible/modules/core @@ -1 +1 @@ -Subproject commit 3c48320b295c3b4f99caccdc5f173b224109a393 +Subproject commit 16a3bdaa7da9e9f7c0572d3a3fdbfd79f29c2b9d From 857456ea5f159bbd333528aa6111b1510e1be78b Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Wed, 16 Dec 2015 18:21:47 -0500 Subject: [PATCH 0161/1113] Fixing template integration test for python 2.6 versions No longer immediately fallback to to_json if simplejson is not installed --- lib/ansible/plugins/filter/core.py | 4 +++- test/integration/roles/test_template/tasks/main.yml | 7 ------- 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/lib/ansible/plugins/filter/core.py 
b/lib/ansible/plugins/filter/core.py index 3ab9db5a51b..dc9acb4d092 100644 --- a/lib/ansible/plugins/filter/core.py +++ b/lib/ansible/plugins/filter/core.py @@ -100,9 +100,11 @@ def to_nice_json(a, *args, **kw): else: if major >= 2: return simplejson.dumps(a, indent=4, sort_keys=True, *args, **kw) + try: + return json.dumps(a, indent=4, sort_keys=True, cls=AnsibleJSONEncoder, *args, **kw) + except: # Fallback to the to_json filter return to_json(a, *args, **kw) - return json.dumps(a, indent=4, sort_keys=True, cls=AnsibleJSONEncoder, *args, **kw) def bool(a): ''' return a bool for the arg ''' diff --git a/test/integration/roles/test_template/tasks/main.yml b/test/integration/roles/test_template/tasks/main.yml index 28477d44e5b..9fd1d860e00 100644 --- a/test/integration/roles/test_template/tasks/main.yml +++ b/test/integration/roles/test_template/tasks/main.yml @@ -49,13 +49,6 @@ - name: copy known good into place copy: src=foo.txt dest={{output_dir}}/foo.txt -# Seems that python-2.6 now outputs the same format as everywhere else? -# when: pyver.stdout != '2.6' - -#- name: copy known good into place -# copy: src=foo-py26.txt dest={{output_dir}}/foo.txt -# when: pyver.stdout == '2.6' - - name: compare templated file to known good shell: diff {{output_dir}}/foo.templated {{output_dir}}/foo.txt register: diff_result From 15135f3c16a87f68bede61415f2571097eaa6268 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Wed, 16 Dec 2015 19:12:05 -0500 Subject: [PATCH 0162/1113] Make sure we're using the original host when processing include results Also fixes a bug where we were passing an incorrect number of parameters to _do_handler_run() when processing an include file in a handler task/block. Fixes #13560 --- lib/ansible/playbook/included_file.py | 15 +++++++++++---- lib/ansible/plugins/strategy/__init__.py | 2 ++ lib/ansible/plugins/strategy/free.py | 10 ++++++++-- lib/ansible/plugins/strategy/linear.py | 10 ++++++++-- 4 files changed, 29 insertions(+), 8 deletions(-) diff --git a/lib/ansible/playbook/included_file.py b/lib/ansible/playbook/included_file.py index 7fb851a12af..cc756a75a96 100644 --- a/lib/ansible/playbook/included_file.py +++ b/lib/ansible/playbook/included_file.py @@ -49,9 +49,15 @@ class IncludedFile: return "%s (%s): %s" % (self._filename, self._args, self._hosts) @staticmethod - def process_include_results(results, tqm, iterator, loader, variable_manager): + def process_include_results(results, tqm, iterator, inventory, loader, variable_manager): included_files = [] + def get_original_host(host): + if host.name in inventory._hosts_cache: + return inventory._hosts_cache[host.name] + else: + return inventory.get_host(host.name) + for res in results: if res._task.action == 'include': @@ -67,9 +73,10 @@ class IncludedFile: if 'skipped' in include_result and include_result['skipped'] or 'failed' in include_result: continue - original_task = iterator.get_original_task(res._host, res._task) + original_host = get_original_host(res._host) + original_task = iterator.get_original_task(original_host, res._task) - task_vars = variable_manager.get_vars(loader=loader, play=iterator._play, host=res._host, task=original_task) + task_vars = variable_manager.get_vars(loader=loader, play=iterator._play, host=original_host, task=original_task) templar = Templar(loader=loader, variables=task_vars) include_variables = include_result.get('include_variables', dict()) @@ -116,6 +123,6 @@ class IncludedFile: except ValueError: included_files.append(inc_file) - inc_file.add_host(res._host) + 
inc_file.add_host(original_host) return included_files diff --git a/lib/ansible/plugins/strategy/__init__.py b/lib/ansible/plugins/strategy/__init__.py index d2d79d036bd..7b2a3794efc 100644 --- a/lib/ansible/plugins/strategy/__init__.py +++ b/lib/ansible/plugins/strategy/__init__.py @@ -576,6 +576,7 @@ class StrategyBase: host_results, self._tqm, iterator=iterator, + inventory=self._inventory, loader=self._loader, variable_manager=self._variable_manager ) @@ -594,6 +595,7 @@ class StrategyBase: for task in block.block: result = self._do_handler_run( handler=task, + handler_name=None, iterator=iterator, play_context=play_context, notified_hosts=included_file._hosts[:], diff --git a/lib/ansible/plugins/strategy/free.py b/lib/ansible/plugins/strategy/free.py index 11eeaa92494..f4fc1226a1f 100644 --- a/lib/ansible/plugins/strategy/free.py +++ b/lib/ansible/plugins/strategy/free.py @@ -139,8 +139,14 @@ class StrategyModule(StrategyBase): host_results.extend(results) try: - included_files = IncludedFile.process_include_results(host_results, self._tqm, iterator=iterator, - loader=self._loader, variable_manager=self._variable_manager) + included_files = IncludedFile.process_include_results( + host_results, + self._tqm, + iterator=iterator, + inventory=self._inventory, + loader=self._loader, + variable_manager=self._variable_manager + ) except AnsibleError as e: return False diff --git a/lib/ansible/plugins/strategy/linear.py b/lib/ansible/plugins/strategy/linear.py index 8c94267cf46..7bb227dbaea 100644 --- a/lib/ansible/plugins/strategy/linear.py +++ b/lib/ansible/plugins/strategy/linear.py @@ -261,8 +261,14 @@ class StrategyModule(StrategyBase): break try: - included_files = IncludedFile.process_include_results(host_results, self._tqm, - iterator=iterator, loader=self._loader, variable_manager=self._variable_manager) + included_files = IncludedFile.process_include_results( + host_results, + self._tqm, + iterator=iterator, + inventory=self._inventory, + loader=self._loader, + variable_manager=self._variable_manager + ) except AnsibleError as e: return False From e5c2c03dea0998872a6b16a18d6c187685a5fc7a Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Tue, 15 Dec 2015 09:39:13 -0500 Subject: [PATCH 0163/1113] Enable host_key checking at the strategy level Implements a new method in the ssh connection plugin (fetch_and_store_key) which is used to prefetch the key using ssh-keyscan. 
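The patch below prefetches the remote host's SSH key with ssh-keyscan before the first real connection is made, and only records it after the user confirms the fingerprint. A stripped-down sketch of just the fetching step (assumes ssh-keyscan is on the PATH; the real patch additionally filters key types and updates known_hosts):

    import subprocess

    def fetch_host_key(host, port=None, hash_hosts=False):
        """Return raw ssh-keyscan output for a host (one line per key type)."""
        cmd = ['ssh-keyscan']
        if port:
            cmd.extend(['-p', str(port)])
        if hash_hosts:
            cmd.append('-H')   # emit hashed hostnames, matching HashKnownHosts=yes
        cmd.append(host)

        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = proc.communicate()
        if not stdout:
            raise RuntimeError("failed to fetch host key for %s: %s" % (host, stderr))
        return stdout

    # e.g. print(fetch_host_key('github.com'))
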
--- lib/ansible/executor/task_executor.py | 17 +- lib/ansible/inventory/host.py | 11 +- lib/ansible/plugins/connection/__init__.py | 5 +- lib/ansible/plugins/connection/ssh.py | 193 +++++++++++++++++++-- lib/ansible/plugins/strategy/__init__.py | 30 +++- lib/ansible/utils/connection.py | 50 ++++++ 6 files changed, 273 insertions(+), 33 deletions(-) create mode 100644 lib/ansible/utils/connection.py diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py index 5d7430fad25..2623bc775b2 100644 --- a/lib/ansible/executor/task_executor.py +++ b/lib/ansible/executor/task_executor.py @@ -32,6 +32,7 @@ from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVar from ansible.playbook.conditional import Conditional from ansible.playbook.task import Task from ansible.template import Templar +from ansible.utils.connection import get_smart_connection_type from ansible.utils.encrypt import key_for_hostname from ansible.utils.listify import listify_lookup_plugin_terms from ansible.utils.unicode import to_unicode @@ -564,21 +565,7 @@ class TaskExecutor: conn_type = self._play_context.connection if conn_type == 'smart': - conn_type = 'ssh' - if sys.platform.startswith('darwin') and self._play_context.password: - # due to a current bug in sshpass on OSX, which can trigger - # a kernel panic even for non-privileged users, we revert to - # paramiko on that OS when a SSH password is specified - conn_type = "paramiko" - else: - # see if SSH can support ControlPersist if not use paramiko - try: - cmd = subprocess.Popen(['ssh','-o','ControlPersist'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - (out, err) = cmd.communicate() - if "Bad configuration option" in err or "Usage:" in err: - conn_type = "paramiko" - except OSError: - conn_type = "paramiko" + conn_type = get_smart_connection_type(self._play_context) connection = self._shared_loader_obj.connection_loader.get(conn_type, self._play_context, self._new_stdin) if not connection: diff --git a/lib/ansible/inventory/host.py b/lib/ansible/inventory/host.py index 6263dcbc80d..70f9f57b5f1 100644 --- a/lib/ansible/inventory/host.py +++ b/lib/ansible/inventory/host.py @@ -57,6 +57,7 @@ class Host: name=self.name, vars=self.vars.copy(), address=self.address, + has_hostkey=self.has_hostkey, uuid=self._uuid, gathered_facts=self._gathered_facts, groups=groups, @@ -65,10 +66,11 @@ class Host: def deserialize(self, data): self.__init__() - self.name = data.get('name') - self.vars = data.get('vars', dict()) - self.address = data.get('address', '') - self._uuid = data.get('uuid', uuid.uuid4()) + self.name = data.get('name') + self.vars = data.get('vars', dict()) + self.address = data.get('address', '') + self.has_hostkey = data.get('has_hostkey', False) + self._uuid = data.get('uuid', uuid.uuid4()) groups = data.get('groups', []) for group_data in groups: @@ -89,6 +91,7 @@ class Host: self._gathered_facts = False self._uuid = uuid.uuid4() + self.has_hostkey = False def __repr__(self): return self.get_name() diff --git a/lib/ansible/plugins/connection/__init__.py b/lib/ansible/plugins/connection/__init__.py index 06616bac4ca..7fc19c8c195 100644 --- a/lib/ansible/plugins/connection/__init__.py +++ b/lib/ansible/plugins/connection/__init__.py @@ -23,11 +23,11 @@ __metaclass__ = type import fcntl import gettext import os -from abc import ABCMeta, abstractmethod, abstractproperty +from abc import ABCMeta, abstractmethod, abstractproperty from functools import wraps -from ansible.compat.six import with_metaclass +from 
ansible.compat.six import with_metaclass from ansible import constants as C from ansible.errors import AnsibleError from ansible.plugins import shell_loader @@ -233,3 +233,4 @@ class ConnectionBase(with_metaclass(ABCMeta, object)): f = self._play_context.connection_lockfd fcntl.lockf(f, fcntl.LOCK_UN) display.vvvv('CONNECTION: pid %d released lock on %d' % (os.getpid(), f)) + diff --git a/lib/ansible/plugins/connection/ssh.py b/lib/ansible/plugins/connection/ssh.py index a2abcf20aee..cce29824e1a 100644 --- a/lib/ansible/plugins/connection/ssh.py +++ b/lib/ansible/plugins/connection/ssh.py @@ -19,7 +19,12 @@ from __future__ import (absolute_import, division, print_function) __metaclass__ = type +from ansible.compat.six import text_type + +import base64 import fcntl +import hmac +import operator import os import pipes import pty @@ -28,9 +33,13 @@ import shlex import subprocess import time +from hashlib import md5, sha1, sha256 + from ansible import constants as C from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound from ansible.plugins.connection import ConnectionBase +from ansible.utils.boolean import boolean +from ansible.utils.connection import get_smart_connection_type from ansible.utils.path import unfrackpath, makedirs_safe from ansible.utils.unicode import to_bytes, to_unicode @@ -41,7 +50,128 @@ except ImportError: display = Display() SSHPASS_AVAILABLE = None +HASHED_KEY_MAGIC = "|1|" + +def split_args(argstring): + """ + Takes a string like '-o Foo=1 -o Bar="foo bar"' and returns a + list ['-o', 'Foo=1', '-o', 'Bar=foo bar'] that can be added to + the argument list. The list will not contain any empty elements. + """ + return [to_unicode(x.strip()) for x in shlex.split(to_bytes(argstring)) if x.strip()] + +def get_ssh_opts(play_context): + # FIXME: caching may help here + opts_dict = dict() + try: + cmd = ['ssh', '-G', play_context.remote_addr] + res = subprocess.check_output(cmd) + for line in res.split('\n'): + if ' ' in line: + (key, val) = line.split(' ', 1) + else: + key = line + val = '' + opts_dict[key.lower()] = val + + # next, we manually override any options that are being + # set via ssh_args or due to the fact that `ssh -G` doesn't + # actually use the options set via -o + for opt in ['ssh_args', 'ssh_common_args', 'ssh_extra_args']: + attr = getattr(play_context, opt, None) + if attr is not None: + args = split_args(attr) + for arg in args: + if '=' in arg: + (key, val) = arg.split('=', 1) + opts_dict[key.lower()] = val + + return opts_dict + except subprocess.CalledProcessError: + return dict() + +def host_in_known_hosts(host, ssh_opts): + # the setting from the ssh_opts may actually be multiple files, so + # we use shlex.split and simply take the first one specified + user_host_file = os.path.expanduser(shlex.split(ssh_opts.get('userknownhostsfile', '~/.ssh/known_hosts'))[0]) + + host_file_list = [] + host_file_list.append(user_host_file) + host_file_list.append("/etc/ssh/ssh_known_hosts") + host_file_list.append("/etc/ssh/ssh_known_hosts2") + + hfiles_not_found = 0 + for hf in host_file_list: + if not os.path.exists(hf): + continue + try: + host_fh = open(hf) + except (OSError, IOError) as e: + continue + else: + data = host_fh.read() + host_fh.close() + + for line in data.split("\n"): + line = line.strip() + if line is None or " " not in line: + continue + tokens = line.split() + if not tokens: + continue + if tokens[0].find(HASHED_KEY_MAGIC) == 0: + # this is a hashed known host entry + try: + (kn_salt, kn_host) = 
tokens[0][len(HASHED_KEY_MAGIC):].split("|",2) + hash = hmac.new(kn_salt.decode('base64'), digestmod=sha1) + hash.update(host) + if hash.digest() == kn_host.decode('base64'): + return True + except: + # invalid hashed host key, skip it + continue + else: + # standard host file entry + if host in tokens[0]: + return True + + return False + +def fetch_ssh_host_key(play_context, ssh_opts): + keyscan_cmd = ['ssh-keyscan'] + + if play_context.port: + keyscan_cmd.extend(['-p', text_type(play_context.port)]) + + if boolean(ssh_opts.get('hashknownhosts', 'no')): + keyscan_cmd.append('-H') + keyscan_cmd.append(play_context.remote_addr) + + p = subprocess.Popen(keyscan_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True) + (stdout, stderr) = p.communicate() + if stdout == '': + raise AnsibleConnectionFailure("Failed to connect to the host to fetch the host key: %s." % stderr) + else: + return stdout + +def add_host_key(host_key, ssh_opts): + # the setting from the ssh_opts may actually be multiple files, so + # we use shlex.split and simply take the first one specified + user_known_hosts = os.path.expanduser(shlex.split(ssh_opts.get('userknownhostsfile', '~/.ssh/known_hosts'))[0]) + user_ssh_dir = os.path.dirname(user_known_hosts) + + if not os.path.exists(user_ssh_dir): + raise AnsibleError("the user ssh directory does not exist: %s" % user_ssh_dir) + elif not os.path.isdir(user_ssh_dir): + raise AnsibleError("%s is not a directory" % user_ssh_dir) + + try: + display.vv("adding to known_hosts file: %s" % user_known_hosts) + with open(user_known_hosts, 'a') as f: + f.write(host_key) + except (OSError, IOError) as e: + raise AnsibleError("error when trying to access the known hosts file: '%s', error was: %s" % (user_known_hosts, text_type(e))) class Connection(ConnectionBase): ''' ssh based connections ''' @@ -62,6 +192,56 @@ class Connection(ConnectionBase): def _connect(self): return self + @staticmethod + def fetch_and_store_key(host, play_context): + ssh_opts = get_ssh_opts(play_context) + if not host_in_known_hosts(play_context.remote_addr, ssh_opts): + display.debug("host %s does not have a known host key, fetching it" % host) + + # build the list of valid host key types, for use later as we scan for keys. + # we also use this to determine the most preferred key when multiple keys are available + valid_host_key_types = [x.lower() for x in ssh_opts.get('hostbasedkeytypes', '').split(',')] + + # attempt to fetch the key with ssh-keyscan. More than one key may be + # returned, so we save all and use the above list to determine which + host_key_data = fetch_ssh_host_key(play_context, ssh_opts).strip().split('\n') + host_keys = dict() + for host_key in host_key_data: + (host_info, key_type, key_hash) = host_key.strip().split(' ', 3) + key_type = key_type.lower() + if key_type in valid_host_key_types and key_type not in host_keys: + host_keys[key_type.lower()] = host_key + + if len(host_keys) == 0: + raise AnsibleConnectionFailure("none of the available host keys found were in the HostBasedKeyTypes configuration option") + + # now we determine the preferred key by sorting the above dict on the + # index of the key type in the valid keys list + preferred_key = sorted(host_keys.items(), cmp=lambda x,y: cmp(valid_host_key_types.index(x), valid_host_key_types.index(y)), key=operator.itemgetter(0))[0] + + # shamelessly copied from here: + # https://github.com/ojarva/python-sshpubkeys/blob/master/sshpubkeys/__init__.py#L39 + # (which shamelessly copied it from somewhere else...) 
+ (host_info, key_type, key_hash) = preferred_key[1].strip().split(' ', 3) + decoded_key = key_hash.decode('base64') + fp_plain = md5(decoded_key).hexdigest() + key_data = ':'.join(a+b for a, b in zip(fp_plain[::2], fp_plain[1::2])) + + # prompt the user to add the key + # if yes, add it, otherwise raise AnsibleConnectionFailure + display.display("\nThe authenticity of host %s (%s) can't be established." % (host.name, play_context.remote_addr)) + display.display("%s key fingerprint is SHA256:%s." % (key_type.upper(), sha256(decoded_key).digest().encode('base64').strip())) + display.display("%s key fingerprint is MD5:%s." % (key_type.upper(), key_data)) + response = display.prompt("Are you sure you want to continue connecting (yes/no)? ") + display.display("") + if boolean(response): + add_host_key(host_key, ssh_opts) + return True + else: + raise AnsibleConnectionFailure("Host key validation failed.") + + return False + @staticmethod def _sshpass_available(): global SSHPASS_AVAILABLE @@ -100,15 +280,6 @@ class Connection(ConnectionBase): return controlpersist, controlpath - @staticmethod - def _split_args(argstring): - """ - Takes a string like '-o Foo=1 -o Bar="foo bar"' and returns a - list ['-o', 'Foo=1', '-o', 'Bar=foo bar'] that can be added to - the argument list. The list will not contain any empty elements. - """ - return [to_unicode(x.strip()) for x in shlex.split(to_bytes(argstring)) if x.strip()] - def _add_args(self, explanation, args): """ Adds the given args to self._command and displays a caller-supplied @@ -157,7 +328,7 @@ class Connection(ConnectionBase): # Next, we add [ssh_connection]ssh_args from ansible.cfg. if self._play_context.ssh_args: - args = self._split_args(self._play_context.ssh_args) + args = split_args(self._play_context.ssh_args) self._add_args("ansible.cfg set ssh_args", args) # Now we add various arguments controlled by configuration file settings @@ -210,7 +381,7 @@ class Connection(ConnectionBase): for opt in ['ssh_common_args', binary + '_extra_args']: attr = getattr(self._play_context, opt, None) if attr is not None: - args = self._split_args(attr) + args = split_args(attr) self._add_args("PlayContext set %s" % opt, args) # Check if ControlPersist is enabled and add a ControlPath if one hasn't diff --git a/lib/ansible/plugins/strategy/__init__.py b/lib/ansible/plugins/strategy/__init__.py index 7b2a3794efc..e460708f906 100644 --- a/lib/ansible/plugins/strategy/__init__.py +++ b/lib/ansible/plugins/strategy/__init__.py @@ -29,7 +29,7 @@ import zlib from jinja2.exceptions import UndefinedError from ansible import constants as C -from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable +from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleConnectionFailure from ansible.executor.play_iterator import PlayIterator from ansible.executor.process.worker import WorkerProcess from ansible.executor.task_result import TaskResult @@ -39,6 +39,7 @@ from ansible.playbook.helpers import load_list_of_blocks from ansible.playbook.included_file import IncludedFile from ansible.plugins import action_loader, connection_loader, filter_loader, lookup_loader, module_loader, test_loader from ansible.template import Templar +from ansible.utils.connection import get_smart_connection_type from ansible.vars.unsafe_proxy import wrap_var try: @@ -139,6 +140,33 @@ class StrategyBase: display.debug("entering _queue_task() for %s/%s" % (host, task)) + if C.HOST_KEY_CHECKING and not host.has_hostkey: + # caveat here, 
regarding with loops. It is assumed that none of the connection + # related variables would contain '{{item}}' as it would cause some really + # weird loops. As is, if someone did something odd like that they would need + # to disable host key checking + templar = Templar(loader=self._loader, variables=task_vars) + temp_pc = play_context.set_task_and_variable_override(task=task, variables=task_vars, templar=templar) + temp_pc.post_validate(templar) + if temp_pc.connection in ('smart', 'ssh') and get_smart_connection_type(temp_pc) == 'ssh': + try: + # get the ssh connection plugin's class, and use its builtin + # static method to fetch and save the key to the known_hosts file + ssh_conn = connection_loader.get('ssh', class_only=True) + ssh_conn.fetch_and_store_key(host, temp_pc) + except AnsibleConnectionFailure as e: + # if that fails, add the host to the list of unreachable + # hosts and send the appropriate callback + self._tqm._unreachable_hosts[host.name] = True + self._tqm._stats.increment('dark', host.name) + tr = TaskResult(host=host, task=task, return_data=dict(msg=text_type(e))) + self._tqm.send_callback('v2_runner_on_unreachable', tr) + return + + # finally, we set the has_hostkey flag to true for this + # host so we can skip it quickly in the future + host.has_hostkey = True + task_vars['hostvars'] = self._tqm.hostvars # and then queue the new task display.debug("%s - putting task (%s) in queue" % (host, task)) diff --git a/lib/ansible/utils/connection.py b/lib/ansible/utils/connection.py new file mode 100644 index 00000000000..6f6b405640e --- /dev/null +++ b/lib/ansible/utils/connection.py @@ -0,0 +1,50 @@ +# (c) 2015, Ansible, Inc. +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see . + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import subprocess +import sys + + +__all__ = ['get_smart_connection_type'] + +def get_smart_connection_type(play_context): + ''' + Uses the ssh command with the ControlPersist option while checking + for an error to determine if we should use ssh or paramiko. Also + may take other factors into account. 
+ ''' + + conn_type = 'ssh' + if sys.platform.startswith('darwin') and play_context.password: + # due to a current bug in sshpass on OSX, which can trigger + # a kernel panic even for non-privileged users, we revert to + # paramiko on that OS when a SSH password is specified + conn_type = "paramiko" + else: + # see if SSH can support ControlPersist if not use paramiko + try: + cmd = subprocess.Popen(['ssh','-o','ControlPersist'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + (out, err) = cmd.communicate() + if "Bad configuration option" in err or "Usage:" in err: + conn_type = "paramiko" + except OSError: + conn_type = "paramiko" + + return conn_type From d7f2f606e179cf0df4d308a0055b4ad62207b47c Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Wed, 16 Dec 2015 21:49:33 -0500 Subject: [PATCH 0164/1113] Add has_hostkey to mock objects to fix broken unit tests --- test/units/plugins/strategies/test_strategy_base.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/test/units/plugins/strategies/test_strategy_base.py b/test/units/plugins/strategies/test_strategy_base.py index 53e243f926b..8d1a1e8adab 100644 --- a/test/units/plugins/strategies/test_strategy_base.py +++ b/test/units/plugins/strategies/test_strategy_base.py @@ -76,6 +76,7 @@ class TestStrategyBase(unittest.TestCase): for i in range(0, 5): mock_host = MagicMock() mock_host.name = "host%02d" % (i+1) + mock_host.has_hostkey = True mock_hosts.append(mock_host) mock_inventory = MagicMock() @@ -111,6 +112,7 @@ class TestStrategyBase(unittest.TestCase): fake_loader = DictDataLoader() mock_var_manager = MagicMock() mock_host = MagicMock() + mock_host.has_hostkey = True mock_inventory = MagicMock() mock_options = MagicMock() mock_options.module_path = None @@ -171,6 +173,7 @@ class TestStrategyBase(unittest.TestCase): mock_host = MagicMock() mock_host.name = 'test01' mock_host.vars = dict() + mock_host.has_hostkey = True mock_task = MagicMock() mock_task._role = None @@ -347,6 +350,7 @@ class TestStrategyBase(unittest.TestCase): mock_host = MagicMock(Host) mock_host.name = "test01" + mock_host.has_hostkey = True mock_inventory = MagicMock() mock_inventory.get_hosts.return_value = [mock_host] From d9c74536be63cedc3dd1711c73844827990e898d Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Thu, 17 Dec 2015 09:44:40 -0500 Subject: [PATCH 0165/1113] Fix handling of environment inheritence, and template each inherited env Environments were not being templated individually, so a variable environment value was causing the exception regarding dicts to be hit. Also, environments as inherited were coming through with the tasks listed first, followed by the parents, so they were being merged backwards. Reversing the list of environments fixed this. 
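The fix below templates each inherited environment and, crucially, reverses the inherited list, which arrives task-first, so that play and block values are applied before the task's own and the most specific setting wins. A tiny standalone illustration of that merge order (plain dicts, templating left out):

    def merge_environments(inherited):
        """Merge environment dicts that are listed most-specific-first."""
        final_environment = {}
        # apply parents first so the task-level values override them
        for environment in reversed(inherited):
            if environment:
                final_environment.update(environment)
        return final_environment

    # inherited order as delivered: task, then block, then play
    print(merge_environments([
        {'PATH': '/opt/task/bin'},                   # task
        {'PATH': '/usr/local/bin', 'LC_ALL': 'C'},   # block
        {'http_proxy': 'http://proxy:3128'},         # play
    ]))
    # -> PATH comes from the task entry, LC_ALL and http_proxy from the parents
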
--- lib/ansible/plugins/action/__init__.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py index 254bab476bb..e9b18651d66 100644 --- a/lib/ansible/plugins/action/__init__.py +++ b/lib/ansible/plugins/action/__init__.py @@ -151,14 +151,19 @@ class ActionBase(with_metaclass(ABCMeta, object)): if not isinstance(environments, list): environments = [ environments ] + # the environments as inherited need to be reversed, to make + # sure we merge in the parent's values first so those in the + # block then task 'win' in precedence + environments.reverse() for environment in environments: if environment is None: continue - if not isinstance(environment, dict): - raise AnsibleError("environment must be a dictionary, received %s (%s)" % (environment, type(environment))) + temp_environment = self._templar.template(environment) + if not isinstance(temp_environment, dict): + raise AnsibleError("environment must be a dictionary, received %s (%s)" % (temp_environment, type(temp_environment))) # very deliberately using update here instead of combine_vars, as # these environment settings should not need to merge sub-dicts - final_environment.update(environment) + final_environment.update(temp_environment) final_environment = self._templar.template(final_environment) return self._connection._shell.env_prefix(**final_environment) From dd3d04e96ab30bb0df89b5e3ab1ac9a9d91d5841 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Thu, 17 Dec 2015 10:31:14 -0500 Subject: [PATCH 0166/1113] Adding pip install of virtualenv to test deps integration role --- .../roles/ansible_test_deps/tasks/main.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml index f71128921d9..5f75085d920 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml @@ -10,6 +10,9 @@ ignore_errors: true when: ansible_os_family == 'Debian' +- name: Install virtualenv + pip: name=virtualenv state=present + - name: Install RH epel yum: name="epel-release" state=installed sudo: true From 0b1ad8d4905fa83eddbc08e2a3dd395aa99b8aed Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Thu, 17 Dec 2015 10:41:58 -0500 Subject: [PATCH 0167/1113] Switch virtualenv dep installation from pip to package manager --- .../roles/ansible_test_deps/tasks/main.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml index 5f75085d920..c9cb256a35c 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml @@ -11,7 +11,12 @@ when: ansible_os_family == 'Debian' - name: Install virtualenv - pip: name=virtualenv state=present + yum: name=python-virtualenv state=installed + when: ansible_os_family == 'RedHat' + +- name: Install virtualenv + apt: name=python-virtualenv state=installed + when: ansible_os_family == 'Debian' - name: Install RH epel yum: name="epel-release" state=installed From cf3d503f790ddf7ba74bc768bd2faad7a550f5ee Mon Sep 17 
00:00:00 2001 From: James Cammarata Date: Thu, 17 Dec 2015 11:00:54 -0500 Subject: [PATCH 0168/1113] Moving apt cache update to top to ensure cache is updated before deps installed --- .../roles/ansible_test_deps/tasks/main.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml index c9cb256a35c..c2fc955a164 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml @@ -1,5 +1,8 @@ --- +- apt: update_cache=yes + when: ansible_os_family == 'Debian' + - name: Install sudo yum: name=sudo state=installed ignore_errors: true @@ -42,9 +45,6 @@ - libselinux-python when: ansible_os_family == 'RedHat' -- apt: update_cache=yes - when: ansible_os_family == 'Debian' - - name: Install Debian ansible dependencies apt: name="{{ item }}" state=installed update_cache=yes sudo: true From 26bbabcfba637e17b36bb20d064c390cf0461e4d Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Thu, 17 Dec 2015 11:15:06 -0500 Subject: [PATCH 0169/1113] Consolidating package lines for virtualenv install in test deps integration --- .../roles/ansible_test_deps/tasks/main.yml | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml index c2fc955a164..ac133730ec5 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml @@ -13,14 +13,6 @@ ignore_errors: true when: ansible_os_family == 'Debian' -- name: Install virtualenv - yum: name=python-virtualenv state=installed - when: ansible_os_family == 'RedHat' - -- name: Install virtualenv - apt: name=python-virtualenv state=installed - when: ansible_os_family == 'Debian' - - name: Install RH epel yum: name="epel-release" state=installed sudo: true @@ -43,6 +35,7 @@ - gcc - python-devel - libselinux-python + - python-virtualenv when: ansible_os_family == 'RedHat' - name: Install Debian ansible dependencies @@ -57,6 +50,7 @@ - git - unzip - python-dev + - python-virtualenv when: ansible_os_family == 'Debian' - name: Install ubuntu 12.04 ansible dependencies From 21c127c5813c800204c729d84188f1e6d7bae3e7 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Thu, 17 Dec 2015 12:06:17 -0500 Subject: [PATCH 0170/1113] Fixing bugs in ssh known_host fetching * If remote_addr is not set in the PlayContext, use the host.address field instead (which is how the action plugin works) Fixes #13581 --- lib/ansible/plugins/connection/ssh.py | 29 +++++++++++++++++++-------- 1 file changed, 21 insertions(+), 8 deletions(-) diff --git a/lib/ansible/plugins/connection/ssh.py b/lib/ansible/plugins/connection/ssh.py index cce29824e1a..c24d1667348 100644 --- a/lib/ansible/plugins/connection/ssh.py +++ b/lib/ansible/plugins/connection/ssh.py @@ -60,11 +60,15 @@ def split_args(argstring): """ return [to_unicode(x.strip()) for x in shlex.split(to_bytes(argstring)) if x.strip()] -def get_ssh_opts(play_context): +def get_ssh_opts(host, play_context): # FIXME: caching may help here opts_dict = dict() try: - cmd = ['ssh', '-G', play_context.remote_addr] + remote_addr = 
play_context.remote_addr + if not remote_addr: + remote_addr = host.address + + cmd = ['ssh', '-G', remote_addr] res = subprocess.check_output(cmd) for line in res.split('\n'): if ' ' in line: @@ -137,7 +141,7 @@ def host_in_known_hosts(host, ssh_opts): return False -def fetch_ssh_host_key(play_context, ssh_opts): +def fetch_ssh_host_key(host, play_context, ssh_opts): keyscan_cmd = ['ssh-keyscan'] if play_context.port: @@ -146,7 +150,11 @@ def fetch_ssh_host_key(play_context, ssh_opts): if boolean(ssh_opts.get('hashknownhosts', 'no')): keyscan_cmd.append('-H') - keyscan_cmd.append(play_context.remote_addr) + remote_addr = play_context.remote_addr + if not remote_addr: + remote_addr = host.address + + keyscan_cmd.append(remote_addr) p = subprocess.Popen(keyscan_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True) (stdout, stderr) = p.communicate() @@ -194,8 +202,13 @@ class Connection(ConnectionBase): @staticmethod def fetch_and_store_key(host, play_context): - ssh_opts = get_ssh_opts(play_context) - if not host_in_known_hosts(play_context.remote_addr, ssh_opts): + ssh_opts = get_ssh_opts(host, play_context) + + remote_addr = play_context.remote_addr + if not remote_addr: + remote_addr = host.address + + if not host_in_known_hosts(remote_addr, ssh_opts): display.debug("host %s does not have a known host key, fetching it" % host) # build the list of valid host key types, for use later as we scan for keys. @@ -204,7 +217,7 @@ class Connection(ConnectionBase): # attempt to fetch the key with ssh-keyscan. More than one key may be # returned, so we save all and use the above list to determine which - host_key_data = fetch_ssh_host_key(play_context, ssh_opts).strip().split('\n') + host_key_data = fetch_ssh_host_key(host, play_context, ssh_opts).strip().split('\n') host_keys = dict() for host_key in host_key_data: (host_info, key_type, key_hash) = host_key.strip().split(' ', 3) @@ -229,7 +242,7 @@ class Connection(ConnectionBase): # prompt the user to add the key # if yes, add it, otherwise raise AnsibleConnectionFailure - display.display("\nThe authenticity of host %s (%s) can't be established." % (host.name, play_context.remote_addr)) + display.display("\nThe authenticity of host %s (%s) can't be established." % (host.name, remote_addr)) display.display("%s key fingerprint is SHA256:%s." % (key_type.upper(), sha256(decoded_key).digest().encode('base64').strip())) display.display("%s key fingerprint is MD5:%s." % (key_type.upper(), key_data)) response = display.prompt("Are you sure you want to continue connecting (yes/no)? 
") From 8db4415e2e95e5993822b4f75e700dd14a928ad9 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Thu, 17 Dec 2015 12:25:29 -0500 Subject: [PATCH 0171/1113] changed test to use filter for accurate reporting --- test/integration/roles/test_service/tasks/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/roles/test_service/tasks/main.yml b/test/integration/roles/test_service/tasks/main.yml index c0e590643c9..8b61d62143a 100644 --- a/test/integration/roles/test_service/tasks/main.yml +++ b/test/integration/roles/test_service/tasks/main.yml @@ -98,7 +98,7 @@ - name: assert that the broken test failed assert: that: - - "broken_enable_result.failed == True" + - "broken_enable_result|failed" - name: remove the test daemon script file: path=/usr/sbin/ansible_test_service state=absent From 586208234cc921acc70fbe1fff211707ceba0c7a Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Thu, 17 Dec 2015 12:42:53 -0500 Subject: [PATCH 0172/1113] Revert "Fixing bugs in ssh known_host fetching" This reverts commit 21c127c5813c800204c729d84188f1e6d7bae3e7. --- lib/ansible/plugins/connection/ssh.py | 29 ++++++++------------------- 1 file changed, 8 insertions(+), 21 deletions(-) diff --git a/lib/ansible/plugins/connection/ssh.py b/lib/ansible/plugins/connection/ssh.py index c24d1667348..cce29824e1a 100644 --- a/lib/ansible/plugins/connection/ssh.py +++ b/lib/ansible/plugins/connection/ssh.py @@ -60,15 +60,11 @@ def split_args(argstring): """ return [to_unicode(x.strip()) for x in shlex.split(to_bytes(argstring)) if x.strip()] -def get_ssh_opts(host, play_context): +def get_ssh_opts(play_context): # FIXME: caching may help here opts_dict = dict() try: - remote_addr = play_context.remote_addr - if not remote_addr: - remote_addr = host.address - - cmd = ['ssh', '-G', remote_addr] + cmd = ['ssh', '-G', play_context.remote_addr] res = subprocess.check_output(cmd) for line in res.split('\n'): if ' ' in line: @@ -141,7 +137,7 @@ def host_in_known_hosts(host, ssh_opts): return False -def fetch_ssh_host_key(host, play_context, ssh_opts): +def fetch_ssh_host_key(play_context, ssh_opts): keyscan_cmd = ['ssh-keyscan'] if play_context.port: @@ -150,11 +146,7 @@ def fetch_ssh_host_key(host, play_context, ssh_opts): if boolean(ssh_opts.get('hashknownhosts', 'no')): keyscan_cmd.append('-H') - remote_addr = play_context.remote_addr - if not remote_addr: - remote_addr = host.address - - keyscan_cmd.append(remote_addr) + keyscan_cmd.append(play_context.remote_addr) p = subprocess.Popen(keyscan_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True) (stdout, stderr) = p.communicate() @@ -202,13 +194,8 @@ class Connection(ConnectionBase): @staticmethod def fetch_and_store_key(host, play_context): - ssh_opts = get_ssh_opts(host, play_context) - - remote_addr = play_context.remote_addr - if not remote_addr: - remote_addr = host.address - - if not host_in_known_hosts(remote_addr, ssh_opts): + ssh_opts = get_ssh_opts(play_context) + if not host_in_known_hosts(play_context.remote_addr, ssh_opts): display.debug("host %s does not have a known host key, fetching it" % host) # build the list of valid host key types, for use later as we scan for keys. @@ -217,7 +204,7 @@ class Connection(ConnectionBase): # attempt to fetch the key with ssh-keyscan. 
More than one key may be # returned, so we save all and use the above list to determine which - host_key_data = fetch_ssh_host_key(host, play_context, ssh_opts).strip().split('\n') + host_key_data = fetch_ssh_host_key(play_context, ssh_opts).strip().split('\n') host_keys = dict() for host_key in host_key_data: (host_info, key_type, key_hash) = host_key.strip().split(' ', 3) @@ -242,7 +229,7 @@ class Connection(ConnectionBase): # prompt the user to add the key # if yes, add it, otherwise raise AnsibleConnectionFailure - display.display("\nThe authenticity of host %s (%s) can't be established." % (host.name, remote_addr)) + display.display("\nThe authenticity of host %s (%s) can't be established." % (host.name, play_context.remote_addr)) display.display("%s key fingerprint is SHA256:%s." % (key_type.upper(), sha256(decoded_key).digest().encode('base64').strip())) display.display("%s key fingerprint is MD5:%s." % (key_type.upper(), key_data)) response = display.prompt("Are you sure you want to continue connecting (yes/no)? ") From e5462194261c7b55ccdf41adc4525dc86a1a34c1 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Thu, 17 Dec 2015 12:43:36 -0500 Subject: [PATCH 0173/1113] Revert "Enable host_key checking at the strategy level" This reverts commit 1a6d660d7e285cceec474952a33af4d8dffd0a8d. --- lib/ansible/executor/task_executor.py | 17 +- lib/ansible/inventory/host.py | 11 +- lib/ansible/plugins/connection/__init__.py | 5 +- lib/ansible/plugins/connection/ssh.py | 193 ++------------------- lib/ansible/plugins/strategy/__init__.py | 30 +--- lib/ansible/utils/connection.py | 50 ------ 6 files changed, 33 insertions(+), 273 deletions(-) delete mode 100644 lib/ansible/utils/connection.py diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py index 2623bc775b2..5d7430fad25 100644 --- a/lib/ansible/executor/task_executor.py +++ b/lib/ansible/executor/task_executor.py @@ -32,7 +32,6 @@ from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVar from ansible.playbook.conditional import Conditional from ansible.playbook.task import Task from ansible.template import Templar -from ansible.utils.connection import get_smart_connection_type from ansible.utils.encrypt import key_for_hostname from ansible.utils.listify import listify_lookup_plugin_terms from ansible.utils.unicode import to_unicode @@ -565,7 +564,21 @@ class TaskExecutor: conn_type = self._play_context.connection if conn_type == 'smart': - conn_type = get_smart_connection_type(self._play_context) + conn_type = 'ssh' + if sys.platform.startswith('darwin') and self._play_context.password: + # due to a current bug in sshpass on OSX, which can trigger + # a kernel panic even for non-privileged users, we revert to + # paramiko on that OS when a SSH password is specified + conn_type = "paramiko" + else: + # see if SSH can support ControlPersist if not use paramiko + try: + cmd = subprocess.Popen(['ssh','-o','ControlPersist'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + (out, err) = cmd.communicate() + if "Bad configuration option" in err or "Usage:" in err: + conn_type = "paramiko" + except OSError: + conn_type = "paramiko" connection = self._shared_loader_obj.connection_loader.get(conn_type, self._play_context, self._new_stdin) if not connection: diff --git a/lib/ansible/inventory/host.py b/lib/ansible/inventory/host.py index 70f9f57b5f1..6263dcbc80d 100644 --- a/lib/ansible/inventory/host.py +++ b/lib/ansible/inventory/host.py @@ -57,7 +57,6 @@ class Host: name=self.name, 
vars=self.vars.copy(), address=self.address, - has_hostkey=self.has_hostkey, uuid=self._uuid, gathered_facts=self._gathered_facts, groups=groups, @@ -66,11 +65,10 @@ class Host: def deserialize(self, data): self.__init__() - self.name = data.get('name') - self.vars = data.get('vars', dict()) - self.address = data.get('address', '') - self.has_hostkey = data.get('has_hostkey', False) - self._uuid = data.get('uuid', uuid.uuid4()) + self.name = data.get('name') + self.vars = data.get('vars', dict()) + self.address = data.get('address', '') + self._uuid = data.get('uuid', uuid.uuid4()) groups = data.get('groups', []) for group_data in groups: @@ -91,7 +89,6 @@ class Host: self._gathered_facts = False self._uuid = uuid.uuid4() - self.has_hostkey = False def __repr__(self): return self.get_name() diff --git a/lib/ansible/plugins/connection/__init__.py b/lib/ansible/plugins/connection/__init__.py index 7fc19c8c195..06616bac4ca 100644 --- a/lib/ansible/plugins/connection/__init__.py +++ b/lib/ansible/plugins/connection/__init__.py @@ -23,11 +23,11 @@ __metaclass__ = type import fcntl import gettext import os - from abc import ABCMeta, abstractmethod, abstractproperty -from functools import wraps +from functools import wraps from ansible.compat.six import with_metaclass + from ansible import constants as C from ansible.errors import AnsibleError from ansible.plugins import shell_loader @@ -233,4 +233,3 @@ class ConnectionBase(with_metaclass(ABCMeta, object)): f = self._play_context.connection_lockfd fcntl.lockf(f, fcntl.LOCK_UN) display.vvvv('CONNECTION: pid %d released lock on %d' % (os.getpid(), f)) - diff --git a/lib/ansible/plugins/connection/ssh.py b/lib/ansible/plugins/connection/ssh.py index cce29824e1a..a2abcf20aee 100644 --- a/lib/ansible/plugins/connection/ssh.py +++ b/lib/ansible/plugins/connection/ssh.py @@ -19,12 +19,7 @@ from __future__ import (absolute_import, division, print_function) __metaclass__ = type -from ansible.compat.six import text_type - -import base64 import fcntl -import hmac -import operator import os import pipes import pty @@ -33,13 +28,9 @@ import shlex import subprocess import time -from hashlib import md5, sha1, sha256 - from ansible import constants as C from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound from ansible.plugins.connection import ConnectionBase -from ansible.utils.boolean import boolean -from ansible.utils.connection import get_smart_connection_type from ansible.utils.path import unfrackpath, makedirs_safe from ansible.utils.unicode import to_bytes, to_unicode @@ -50,128 +41,7 @@ except ImportError: display = Display() SSHPASS_AVAILABLE = None -HASHED_KEY_MAGIC = "|1|" - -def split_args(argstring): - """ - Takes a string like '-o Foo=1 -o Bar="foo bar"' and returns a - list ['-o', 'Foo=1', '-o', 'Bar=foo bar'] that can be added to - the argument list. The list will not contain any empty elements. 
- """ - return [to_unicode(x.strip()) for x in shlex.split(to_bytes(argstring)) if x.strip()] - -def get_ssh_opts(play_context): - # FIXME: caching may help here - opts_dict = dict() - try: - cmd = ['ssh', '-G', play_context.remote_addr] - res = subprocess.check_output(cmd) - for line in res.split('\n'): - if ' ' in line: - (key, val) = line.split(' ', 1) - else: - key = line - val = '' - opts_dict[key.lower()] = val - - # next, we manually override any options that are being - # set via ssh_args or due to the fact that `ssh -G` doesn't - # actually use the options set via -o - for opt in ['ssh_args', 'ssh_common_args', 'ssh_extra_args']: - attr = getattr(play_context, opt, None) - if attr is not None: - args = split_args(attr) - for arg in args: - if '=' in arg: - (key, val) = arg.split('=', 1) - opts_dict[key.lower()] = val - - return opts_dict - except subprocess.CalledProcessError: - return dict() - -def host_in_known_hosts(host, ssh_opts): - # the setting from the ssh_opts may actually be multiple files, so - # we use shlex.split and simply take the first one specified - user_host_file = os.path.expanduser(shlex.split(ssh_opts.get('userknownhostsfile', '~/.ssh/known_hosts'))[0]) - - host_file_list = [] - host_file_list.append(user_host_file) - host_file_list.append("/etc/ssh/ssh_known_hosts") - host_file_list.append("/etc/ssh/ssh_known_hosts2") - - hfiles_not_found = 0 - for hf in host_file_list: - if not os.path.exists(hf): - continue - try: - host_fh = open(hf) - except (OSError, IOError) as e: - continue - else: - data = host_fh.read() - host_fh.close() - - for line in data.split("\n"): - line = line.strip() - if line is None or " " not in line: - continue - tokens = line.split() - if not tokens: - continue - if tokens[0].find(HASHED_KEY_MAGIC) == 0: - # this is a hashed known host entry - try: - (kn_salt, kn_host) = tokens[0][len(HASHED_KEY_MAGIC):].split("|",2) - hash = hmac.new(kn_salt.decode('base64'), digestmod=sha1) - hash.update(host) - if hash.digest() == kn_host.decode('base64'): - return True - except: - # invalid hashed host key, skip it - continue - else: - # standard host file entry - if host in tokens[0]: - return True - - return False - -def fetch_ssh_host_key(play_context, ssh_opts): - keyscan_cmd = ['ssh-keyscan'] - - if play_context.port: - keyscan_cmd.extend(['-p', text_type(play_context.port)]) - - if boolean(ssh_opts.get('hashknownhosts', 'no')): - keyscan_cmd.append('-H') - keyscan_cmd.append(play_context.remote_addr) - - p = subprocess.Popen(keyscan_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True) - (stdout, stderr) = p.communicate() - if stdout == '': - raise AnsibleConnectionFailure("Failed to connect to the host to fetch the host key: %s." 
% stderr) - else: - return stdout - -def add_host_key(host_key, ssh_opts): - # the setting from the ssh_opts may actually be multiple files, so - # we use shlex.split and simply take the first one specified - user_known_hosts = os.path.expanduser(shlex.split(ssh_opts.get('userknownhostsfile', '~/.ssh/known_hosts'))[0]) - user_ssh_dir = os.path.dirname(user_known_hosts) - - if not os.path.exists(user_ssh_dir): - raise AnsibleError("the user ssh directory does not exist: %s" % user_ssh_dir) - elif not os.path.isdir(user_ssh_dir): - raise AnsibleError("%s is not a directory" % user_ssh_dir) - - try: - display.vv("adding to known_hosts file: %s" % user_known_hosts) - with open(user_known_hosts, 'a') as f: - f.write(host_key) - except (OSError, IOError) as e: - raise AnsibleError("error when trying to access the known hosts file: '%s', error was: %s" % (user_known_hosts, text_type(e))) class Connection(ConnectionBase): ''' ssh based connections ''' @@ -192,56 +62,6 @@ class Connection(ConnectionBase): def _connect(self): return self - @staticmethod - def fetch_and_store_key(host, play_context): - ssh_opts = get_ssh_opts(play_context) - if not host_in_known_hosts(play_context.remote_addr, ssh_opts): - display.debug("host %s does not have a known host key, fetching it" % host) - - # build the list of valid host key types, for use later as we scan for keys. - # we also use this to determine the most preferred key when multiple keys are available - valid_host_key_types = [x.lower() for x in ssh_opts.get('hostbasedkeytypes', '').split(',')] - - # attempt to fetch the key with ssh-keyscan. More than one key may be - # returned, so we save all and use the above list to determine which - host_key_data = fetch_ssh_host_key(play_context, ssh_opts).strip().split('\n') - host_keys = dict() - for host_key in host_key_data: - (host_info, key_type, key_hash) = host_key.strip().split(' ', 3) - key_type = key_type.lower() - if key_type in valid_host_key_types and key_type not in host_keys: - host_keys[key_type.lower()] = host_key - - if len(host_keys) == 0: - raise AnsibleConnectionFailure("none of the available host keys found were in the HostBasedKeyTypes configuration option") - - # now we determine the preferred key by sorting the above dict on the - # index of the key type in the valid keys list - preferred_key = sorted(host_keys.items(), cmp=lambda x,y: cmp(valid_host_key_types.index(x), valid_host_key_types.index(y)), key=operator.itemgetter(0))[0] - - # shamelessly copied from here: - # https://github.com/ojarva/python-sshpubkeys/blob/master/sshpubkeys/__init__.py#L39 - # (which shamelessly copied it from somewhere else...) - (host_info, key_type, key_hash) = preferred_key[1].strip().split(' ', 3) - decoded_key = key_hash.decode('base64') - fp_plain = md5(decoded_key).hexdigest() - key_data = ':'.join(a+b for a, b in zip(fp_plain[::2], fp_plain[1::2])) - - # prompt the user to add the key - # if yes, add it, otherwise raise AnsibleConnectionFailure - display.display("\nThe authenticity of host %s (%s) can't be established." % (host.name, play_context.remote_addr)) - display.display("%s key fingerprint is SHA256:%s." % (key_type.upper(), sha256(decoded_key).digest().encode('base64').strip())) - display.display("%s key fingerprint is MD5:%s." % (key_type.upper(), key_data)) - response = display.prompt("Are you sure you want to continue connecting (yes/no)? 
") - display.display("") - if boolean(response): - add_host_key(host_key, ssh_opts) - return True - else: - raise AnsibleConnectionFailure("Host key validation failed.") - - return False - @staticmethod def _sshpass_available(): global SSHPASS_AVAILABLE @@ -280,6 +100,15 @@ class Connection(ConnectionBase): return controlpersist, controlpath + @staticmethod + def _split_args(argstring): + """ + Takes a string like '-o Foo=1 -o Bar="foo bar"' and returns a + list ['-o', 'Foo=1', '-o', 'Bar=foo bar'] that can be added to + the argument list. The list will not contain any empty elements. + """ + return [to_unicode(x.strip()) for x in shlex.split(to_bytes(argstring)) if x.strip()] + def _add_args(self, explanation, args): """ Adds the given args to self._command and displays a caller-supplied @@ -328,7 +157,7 @@ class Connection(ConnectionBase): # Next, we add [ssh_connection]ssh_args from ansible.cfg. if self._play_context.ssh_args: - args = split_args(self._play_context.ssh_args) + args = self._split_args(self._play_context.ssh_args) self._add_args("ansible.cfg set ssh_args", args) # Now we add various arguments controlled by configuration file settings @@ -381,7 +210,7 @@ class Connection(ConnectionBase): for opt in ['ssh_common_args', binary + '_extra_args']: attr = getattr(self._play_context, opt, None) if attr is not None: - args = split_args(attr) + args = self._split_args(attr) self._add_args("PlayContext set %s" % opt, args) # Check if ControlPersist is enabled and add a ControlPath if one hasn't diff --git a/lib/ansible/plugins/strategy/__init__.py b/lib/ansible/plugins/strategy/__init__.py index e460708f906..7b2a3794efc 100644 --- a/lib/ansible/plugins/strategy/__init__.py +++ b/lib/ansible/plugins/strategy/__init__.py @@ -29,7 +29,7 @@ import zlib from jinja2.exceptions import UndefinedError from ansible import constants as C -from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleConnectionFailure +from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable from ansible.executor.play_iterator import PlayIterator from ansible.executor.process.worker import WorkerProcess from ansible.executor.task_result import TaskResult @@ -39,7 +39,6 @@ from ansible.playbook.helpers import load_list_of_blocks from ansible.playbook.included_file import IncludedFile from ansible.plugins import action_loader, connection_loader, filter_loader, lookup_loader, module_loader, test_loader from ansible.template import Templar -from ansible.utils.connection import get_smart_connection_type from ansible.vars.unsafe_proxy import wrap_var try: @@ -140,33 +139,6 @@ class StrategyBase: display.debug("entering _queue_task() for %s/%s" % (host, task)) - if C.HOST_KEY_CHECKING and not host.has_hostkey: - # caveat here, regarding with loops. It is assumed that none of the connection - # related variables would contain '{{item}}' as it would cause some really - # weird loops. 
As is, if someone did something odd like that they would need - # to disable host key checking - templar = Templar(loader=self._loader, variables=task_vars) - temp_pc = play_context.set_task_and_variable_override(task=task, variables=task_vars, templar=templar) - temp_pc.post_validate(templar) - if temp_pc.connection in ('smart', 'ssh') and get_smart_connection_type(temp_pc) == 'ssh': - try: - # get the ssh connection plugin's class, and use its builtin - # static method to fetch and save the key to the known_hosts file - ssh_conn = connection_loader.get('ssh', class_only=True) - ssh_conn.fetch_and_store_key(host, temp_pc) - except AnsibleConnectionFailure as e: - # if that fails, add the host to the list of unreachable - # hosts and send the appropriate callback - self._tqm._unreachable_hosts[host.name] = True - self._tqm._stats.increment('dark', host.name) - tr = TaskResult(host=host, task=task, return_data=dict(msg=text_type(e))) - self._tqm.send_callback('v2_runner_on_unreachable', tr) - return - - # finally, we set the has_hostkey flag to true for this - # host so we can skip it quickly in the future - host.has_hostkey = True - task_vars['hostvars'] = self._tqm.hostvars # and then queue the new task display.debug("%s - putting task (%s) in queue" % (host, task)) diff --git a/lib/ansible/utils/connection.py b/lib/ansible/utils/connection.py deleted file mode 100644 index 6f6b405640e..00000000000 --- a/lib/ansible/utils/connection.py +++ /dev/null @@ -1,50 +0,0 @@ -# (c) 2015, Ansible, Inc. -# -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. If not, see . - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import subprocess -import sys - - -__all__ = ['get_smart_connection_type'] - -def get_smart_connection_type(play_context): - ''' - Uses the ssh command with the ControlPersist option while checking - for an error to determine if we should use ssh or paramiko. Also - may take other factors into account. 
- ''' - - conn_type = 'ssh' - if sys.platform.startswith('darwin') and play_context.password: - # due to a current bug in sshpass on OSX, which can trigger - # a kernel panic even for non-privileged users, we revert to - # paramiko on that OS when a SSH password is specified - conn_type = "paramiko" - else: - # see if SSH can support ControlPersist if not use paramiko - try: - cmd = subprocess.Popen(['ssh','-o','ControlPersist'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - (out, err) = cmd.communicate() - if "Bad configuration option" in err or "Usage:" in err: - conn_type = "paramiko" - except OSError: - conn_type = "paramiko" - - return conn_type From 1b5e7ce0253c896f5166b5ffd1c2614090cc75a1 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Thu, 17 Dec 2015 10:23:02 -0800 Subject: [PATCH 0174/1113] Update submodule refs --- lib/ansible/modules/core | 2 +- lib/ansible/modules/extras | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core index 16a3bdaa7da..c75c0003697 160000 --- a/lib/ansible/modules/core +++ b/lib/ansible/modules/core @@ -1 +1 @@ -Subproject commit 16a3bdaa7da9e9f7c0572d3a3fdbfd79f29c2b9d +Subproject commit c75c0003697d00f52cedb68d4c1b05b7e95991e0 diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras index 8ec4f95ffd6..06bdec0cac8 160000 --- a/lib/ansible/modules/extras +++ b/lib/ansible/modules/extras @@ -1 +1 @@ -Subproject commit 8ec4f95ffd6d4e837cf0f3dd28649fb09afd0caf +Subproject commit 06bdec0cac86ef2339e0b4d8a4616ee24619956f From ce1febe28bb538c9d6db59449caf4da9dcf23f7e Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Thu, 17 Dec 2015 11:25:45 -0800 Subject: [PATCH 0175/1113] debug line needs var not msg --- test/integration/roles/test_get_url/tasks/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/roles/test_get_url/tasks/main.yml b/test/integration/roles/test_get_url/tasks/main.yml index 09ee34277a0..640c987790f 100644 --- a/test/integration/roles/test_get_url/tasks/main.yml +++ b/test/integration/roles/test_get_url/tasks/main.yml @@ -78,7 +78,7 @@ # If distros start backporting SNI, can make a new conditional based on whether this works: # python -c 'from ssl import SSLContext' -- debug: msg=get_url_result +- debug: var=get_url_result - name: Assert that SNI works with this python version assert: that: From bad1c173b87a7b68fc0ae79b35376fc31e8cc5d7 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Thu, 17 Dec 2015 11:36:36 -0800 Subject: [PATCH 0176/1113] Update core submodule for mysql_db fix --- lib/ansible/modules/core | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core index c75c0003697..b4a3fdd4933 160000 --- a/lib/ansible/modules/core +++ b/lib/ansible/modules/core @@ -1 +1 @@ -Subproject commit c75c0003697d00f52cedb68d4c1b05b7e95991e0 +Subproject commit b4a3fdd493378853c0b6ab35d5d8bcf52612a4a0 From 8c6f56f982fce50d5b030928e425740a30d4f86c Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Thu, 17 Dec 2015 11:46:26 -0800 Subject: [PATCH 0177/1113] kennetreitz.org times out but www.kennethreitz.org is fine --- test/integration/roles/test_lookups/tasks/main.yml | 6 +++--- test/integration/roles/test_uri/tasks/main.yml | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/test/integration/roles/test_lookups/tasks/main.yml b/test/integration/roles/test_lookups/tasks/main.yml index 5ca29e27c1e..3c5e066ee34 100644 --- 
a/test/integration/roles/test_lookups/tasks/main.yml +++ b/test/integration/roles/test_lookups/tasks/main.yml @@ -177,7 +177,7 @@ - name: Test that retrieving a url with invalid cert fails set_fact: - web_data: "{{ lookup('url', 'https://kennethreitz.org/') }}" + web_data: "{{ lookup('url', 'https://www.kennethreitz.org/') }}" ignore_errors: True register: url_invalid_cert @@ -188,9 +188,9 @@ - name: Test that retrieving a url with invalid cert with validate_certs=False works set_fact: - web_data: "{{ lookup('url', 'https://kennethreitz.org/', validate_certs=False) }}" + web_data: "{{ lookup('url', 'https://www.kennethreitz.org/', validate_certs=False) }}" register: url_no_validate_cert - assert: that: - - "'kennethreitz.org' in web_data" + - "'www.kennethreitz.org' in web_data" diff --git a/test/integration/roles/test_uri/tasks/main.yml b/test/integration/roles/test_uri/tasks/main.yml index 7300578982d..18229e6b7cf 100644 --- a/test/integration/roles/test_uri/tasks/main.yml +++ b/test/integration/roles/test_uri/tasks/main.yml @@ -94,7 +94,7 @@ - name: test https fetch to a site with mismatched hostname and certificate uri: - url: "https://kennethreitz.org/" + url: "https://www.kennethreitz.org/" dest: "{{ output_dir }}/shouldnotexist.html" ignore_errors: True register: result @@ -117,7 +117,7 @@ - name: test https fetch to a site with mismatched hostname and certificate and validate_certs=no get_url: - url: "https://kennethreitz.org/" + url: "https://www.kennethreitz.org/" dest: "{{ output_dir }}/kreitz.html" validate_certs: no register: result From 5929ffc7c3b79b830edeebdb8542b53c3c0a15b3 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Thu, 17 Dec 2015 16:01:56 -0500 Subject: [PATCH 0178/1113] Make --list-tasks respect tags Also makes the output closer to the appearance of v1 Fixes #13260 --- lib/ansible/cli/playbook.py | 30 ++++++++++++------------------ 1 file changed, 12 insertions(+), 18 deletions(-) diff --git a/lib/ansible/cli/playbook.py b/lib/ansible/cli/playbook.py index a9c0ed018dc..e51d5d3993b 100644 --- a/lib/ansible/cli/playbook.py +++ b/lib/ansible/cli/playbook.py @@ -30,6 +30,7 @@ from ansible.errors import AnsibleError, AnsibleOptionsError from ansible.executor.playbook_executor import PlaybookExecutor from ansible.inventory import Inventory from ansible.parsing.dataloader import DataLoader +from ansible.playbook.play_context import PlayContext from ansible.utils.vars import load_extra_vars from ansible.vars import VariableManager @@ -152,18 +153,10 @@ class PlaybookCLI(CLI): for p in results: display.display('\nplaybook: %s' % p['playbook']) - i = 1 - for play in p['plays']: - if play.name: - playname = play.name - else: - playname = '#' + str(i) - - msg = "\n PLAY: %s" % (playname) - mytags = set() - if self.options.listtags and play.tags: - mytags = mytags.union(set(play.tags)) - msg += ' TAGS: [%s]' % (','.join(mytags)) + for idx, play in enumerate(p['plays']): + msg = "\n play #%d (%s): %s" % (idx + 1, ','.join(play.hosts), play.name) + mytags = set(play.tags) + msg += ' TAGS: [%s]' % (','.join(mytags)) if self.options.listhosts: playhosts = set(inventory.get_hosts(play.hosts)) @@ -176,20 +169,21 @@ class PlaybookCLI(CLI): if self.options.listtags or self.options.listtasks: taskmsg = ' tasks:' + all_vars = variable_manager.get_vars(loader=loader, play=play) + play_context = PlayContext(play=play, options=self.options) for block in play.compile(): + block = block.filter_tagged_tasks(play_context, all_vars) if not block.has_tasks(): continue - j = 1 for task in 
block.block: - taskmsg += "\n %s" % task - if self.options.listtags and task.tags: - taskmsg += " TAGS: [%s]" % ','.join(mytags.union(set(task.tags))) - j = j + 1 + if task.action == 'meta': + continue + taskmsg += "\n %s" % task.get_name() + taskmsg += " TAGS: [%s]" % ','.join(mytags.union(set(task.tags))) display.display(taskmsg) - i = i + 1 return 0 else: return results From d4ffc96c8039e5a79baf23be173d03c2e4c8565f Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Thu, 17 Dec 2015 16:30:23 -0500 Subject: [PATCH 0179/1113] Further tweaks to the output format of list tasks/tags --- lib/ansible/cli/playbook.py | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/lib/ansible/cli/playbook.py b/lib/ansible/cli/playbook.py index e51d5d3993b..d307abdfcc1 100644 --- a/lib/ansible/cli/playbook.py +++ b/lib/ansible/cli/playbook.py @@ -156,7 +156,7 @@ class PlaybookCLI(CLI): for idx, play in enumerate(p['plays']): msg = "\n play #%d (%s): %s" % (idx + 1, ','.join(play.hosts), play.name) mytags = set(play.tags) - msg += ' TAGS: [%s]' % (','.join(mytags)) + msg += '\tTAGS: [%s]' % (','.join(mytags)) if self.options.listhosts: playhosts = set(inventory.get_hosts(play.hosts)) @@ -166,8 +166,11 @@ class PlaybookCLI(CLI): display.display(msg) + all_tags = set() if self.options.listtags or self.options.listtasks: - taskmsg = ' tasks:' + taskmsg = '' + if self.options.listtasks: + taskmsg = ' tasks:\n' all_vars = variable_manager.get_vars(loader=loader, play=play) play_context = PlayContext(play=play, options=self.options) @@ -179,8 +182,18 @@ class PlaybookCLI(CLI): for task in block.block: if task.action == 'meta': continue - taskmsg += "\n %s" % task.get_name() - taskmsg += " TAGS: [%s]" % ','.join(mytags.union(set(task.tags))) + + all_tags.update(task.tags) + if self.options.listtasks: + cur_tags = list(mytags.union(set(task.tags))) + cur_tags.sort() + taskmsg += " %s" % task.action + taskmsg += "\tTAGS: [%s]\n" % ', '.join(cur_tags) + + if self.options.listtags: + cur_tags = list(mytags.union(all_tags)) + cur_tags.sort() + taskmsg += " TASK TAGS: [%s]\n" % ', '.join(cur_tags) display.display(taskmsg) From 4ba7158282f148c90c72f824d6ebcd1a9953b580 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Thu, 17 Dec 2015 16:33:23 -0500 Subject: [PATCH 0180/1113] Fixing a mistake from tweaking list stuff too much Use the action only if the task name is not set --- lib/ansible/cli/playbook.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/ansible/cli/playbook.py b/lib/ansible/cli/playbook.py index d307abdfcc1..dfd06b19208 100644 --- a/lib/ansible/cli/playbook.py +++ b/lib/ansible/cli/playbook.py @@ -187,7 +187,10 @@ class PlaybookCLI(CLI): if self.options.listtasks: cur_tags = list(mytags.union(set(task.tags))) cur_tags.sort() - taskmsg += " %s" % task.action + if task.name: + taskmsg += " %s" % task.get_name() + else: + taskmsg += " %s" % task.action taskmsg += "\tTAGS: [%s]\n" % ', '.join(cur_tags) if self.options.listtags: From 3057fc1753eff42fb073ae866734cb9127cbd25a Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Thu, 17 Dec 2015 13:46:15 -0800 Subject: [PATCH 0181/1113] Update submodule ref for mysql_user fix --- lib/ansible/modules/core | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core index b4a3fdd4933..9366dfb63e5 160000 --- a/lib/ansible/modules/core +++ b/lib/ansible/modules/core @@ -1 +1 @@ -Subproject commit b4a3fdd493378853c0b6ab35d5d8bcf52612a4a0 +Subproject 
commit 9366dfb63e565c9e0901d714be8832fc89b275d6 From c5eda277ac6ca50cf593a724a368ad973d1a3935 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Thu, 17 Dec 2015 17:51:42 -0800 Subject: [PATCH 0182/1113] Fix get_url tests in light of distros backporting SNI support --- .../roles/test_get_url/tasks/main.yml | 21 ++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/test/integration/roles/test_get_url/tasks/main.yml b/test/integration/roles/test_get_url/tasks/main.yml index 640c987790f..d7885f0905e 100644 --- a/test/integration/roles/test_get_url/tasks/main.yml +++ b/test/integration/roles/test_get_url/tasks/main.yml @@ -16,6 +16,21 @@ # You should have received a copy of the GNU General Public License # along with Ansible. If not, see . +- name: Determine if python looks like it will support modern ssl features like SNI + command: python -c 'from ssl import SSLContext' + ignore_errors: True + register: python_test + +- name: Set python_has_sslcontext if we have it + set_fact: + python_has_ssl_context: True + when: python_test.rc == 0 + +- name: Set python_has_sslcontext False if we don't have it + set_fact: + python_has_ssl_context: False + when: python_test.rc != 0 + - name: test https fetch get_url: url="https://raw.githubusercontent.com/ansible/ansible/devel/README.md" dest={{output_dir}}/get_url.txt force=yes register: result @@ -74,7 +89,7 @@ - command: "grep 'sent the following TLS server name indication extension' {{ output_dir}}/sni.html" register: data_result - when: "{{ ansible_python_version | version_compare('2.7.9', '>=') }}" + when: "{{ python_has_ssl_context }}" # If distros start backporting SNI, can make a new conditional based on whether this works: # python -c 'from ssl import SSLContext' @@ -84,11 +99,11 @@ that: - 'data_result.rc == 0' - '"failed" not in get_url_result' - when: "{{ ansible_python_version | version_compare('2.7.9', '>=') }}" + when: "{{ python_has_ssl_context }}" # If the client doesn't support SNI then get_url should have failed with a certificate mismatch - name: Assert that hostname verification failed because SNI is not supported on this version of python assert: that: - 'get_url_result["failed"]' - when: "{{ ansible_python_version | version_compare('2.7.9', '<') }}" + when: "{{ not python_has_ssl_context }}" From 12c0bb9414224517c6b15ec1d58aedd45d40703d Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Thu, 17 Dec 2015 20:52:49 -0500 Subject: [PATCH 0183/1113] Use --source instead of -e for awk in integration Makefile --- test/integration/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/Makefile b/test/integration/Makefile index a2d91f96f1a..dcd30f0b836 100644 --- a/test/integration/Makefile +++ b/test/integration/Makefile @@ -193,5 +193,5 @@ test_lookup_paths: no_log: # This test expects 7 loggable vars and 0 non loggable ones, if either mismatches it fails, run the ansible-playbook command to debug - [ "$$(ansible-playbook no_log_local.yml -i $(INVENTORY) -vvvvv | awk -e 'BEGIN { logme = 0; nolog = 0; } /LOG_ME/ { logme += 1;} /DO_NOT_LOG/ { nolog += 1;} END { printf "%d/%d", logme, nolog; }')" = "6/0" ] + [ "$$(ansible-playbook no_log_local.yml -i $(INVENTORY) -vvvvv | awk --source 'BEGIN { logme = 0; nolog = 0; } /LOG_ME/ { logme += 1;} /DO_NOT_LOG/ { nolog += 1;} END { printf "%d/%d", logme, nolog; }')" = "6/0" ] From 1f3eec293bad4add2e52fbc52a7bbdcc912c3ab8 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Thu, 17 Dec 2015 20:06:53 -0800 Subject: [PATCH 0184/1113] 
Install an updated version of pycrypto on Ubuntu12 from pip --- .../roles/ansible_test_deps/tasks/main.yml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml index ac133730ec5..0b9e58c6598 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml @@ -67,6 +67,14 @@ - rubygems-integration when: ansible_distribution == 'Ubuntu' and ansible_distribution_version == "14.04" +# Not sure why CentOS 6 is working without this.... +#- name: Install Red Hat 6 ansible dependencies +# yum: name="{{ item }}" state=installed +# sudo: true +# with_items: +# - python-crypto2.6 +# when: ansible_distribution in ('CentOS', 'RedHat') and ansible_distribution_major_version == "6" + - name: Install ansible pip deps sudo: true pip: name="{{ item }}" @@ -75,6 +83,13 @@ - Jinja2 - paramiko +- name: Install ubuntu 12.04 ansible pip deps + sudo: true + pip: name="{{ item }}" + with_items: + - pycrypto + when: ansible_distribution == 'Ubuntu' and ansible_distribution_version == "12.04" + - name: Remove tty sudo requirement sudo: true lineinfile: "dest=/etc/sudoers regexp='^Defaults[ , ]*requiretty' line='#Defaults requiretty'" From 3143b352c53e2beeecec996d4ca80fa7a4293f93 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Thu, 17 Dec 2015 23:07:28 -0500 Subject: [PATCH 0185/1113] Add ca-certificates update to the integration deps playbook --- .../roles/ansible_test_deps/tasks/main.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml index 0b9e58c6598..85fad6a7fbb 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml @@ -53,6 +53,10 @@ - python-virtualenv when: ansible_os_family == 'Debian' +- name: update ca certificates + yum: name=ca-certificates state=latest + when: ansible_os_family == 'RedHat' + - name: Install ubuntu 12.04 ansible dependencies apt: name="{{ item }}" state=installed update_cache=yes sudo: true From a391d6f89ab906d585e623f58789b39fb0797faf Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Thu, 17 Dec 2015 20:09:48 -0800 Subject: [PATCH 0186/1113] Add state=latest to pip install of pycrypto --- .../roles/ansible_test_deps/tasks/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml index 85fad6a7fbb..897a4e54edb 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml @@ -89,7 +89,7 @@ - name: Install ubuntu 12.04 ansible pip deps sudo: true - pip: name="{{ item }}" + pip: name="{{ item }}" state=latest with_items: - pycrypto when: ansible_distribution == 'Ubuntu' and ansible_distribution_version == "12.04" From 44e30e49dd4b678ff21d308d0e8b00b769de75e1 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Fri, 18 Dec 2015 07:47:23 -0500 Subject: 
[PATCH 0187/1113] Add awk to integration test deps list --- .../roles/ansible_test_deps/tasks/main.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml index 897a4e54edb..25b19d040e8 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml @@ -33,6 +33,7 @@ - openssl - make - gcc + - gawk - python-devel - libselinux-python - python-virtualenv @@ -49,6 +50,7 @@ - mercurial - git - unzip + - gawk - python-dev - python-virtualenv when: ansible_os_family == 'Debian' From 1debc2da44e05282fea216e4b6e14e83d50bb4ea Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Fri, 18 Dec 2015 10:34:27 -0500 Subject: [PATCH 0188/1113] Do a full yum update to make sure packages are latest version For the deps setup of integration tests, as we sometimes see odd errors we can't reproduce, which may be related to slightly out of date package dependencies. --- .../roles/ansible_test_deps/tasks/main.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml index 25b19d040e8..17198cdc41f 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml @@ -55,8 +55,12 @@ - python-virtualenv when: ansible_os_family == 'Debian' -- name: update ca certificates - yum: name=ca-certificates state=latest +#- name: update ca certificates +# yum: name=ca-certificates state=latest +# when: ansible_os_family == 'RedHat' + +- name: update all rpm packages + yum: name=* state=latest when: ansible_os_family == 'RedHat' - name: Install ubuntu 12.04 ansible dependencies From a3dcb910b8b8ad1c1ff65c31102cccd68ed31bf9 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Fri, 18 Dec 2015 10:58:55 -0500 Subject: [PATCH 0189/1113] Fixing bugs with {changed,failed}_when and until with registered vars * Saving of the registered variable was occuring after the tests for changed/failed_when. * Each of the above fields and until were being post_validated too early, so variables which were not defined at that time were causing task failures. 
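For illustration, a task shaped roughly like the following (hypothetical command and
variable names, not part of this change) depends on the registered variable already
being stored when those tests are evaluated:

    - command: /usr/bin/some_command
      register: cmd_result
      changed_when: cmd_result.rc == 0
      failed_when: "'FATAL' in cmd_result.stdout"
      until: cmd_result.rc == 0
      retries: 3
      delay: 5

Before this fix, cmd_result could still be undefined when changed_when/failed_when
ran, and until was post_validated before the result it references existed.
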
Fixes #13591 --- lib/ansible/executor/task_executor.py | 11 +++++------ lib/ansible/playbook/task.py | 21 +++++++++++++++++++++ 2 files changed, 26 insertions(+), 6 deletions(-) diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py index 5d7430fad25..b0a5157a525 100644 --- a/lib/ansible/executor/task_executor.py +++ b/lib/ansible/executor/task_executor.py @@ -387,7 +387,6 @@ class TaskExecutor: # make a copy of the job vars here, in case we need to update them # with the registered variable value later on when testing conditions - #vars_copy = variables.copy() vars_copy = variables.copy() display.debug("starting attempt loop") @@ -404,6 +403,11 @@ class TaskExecutor: return dict(unreachable=True, msg=to_unicode(e)) display.debug("handler run complete") + # update the local copy of vars with the registered value, if specified, + # or any facts which may have been generated by the module execution + if self._task.register: + vars_copy[self._task.register] = result + if self._task.async > 0: # the async_wrapper module returns dumped JSON via its stdout # response, so we parse it here and replace the result @@ -433,11 +437,6 @@ class TaskExecutor: return failed_when_result return False - # update the local copy of vars with the registered value, if specified, - # or any facts which may have been generated by the module execution - if self._task.register: - vars_copy[self._task.register] = result - if 'ansible_facts' in result: vars_copy.update(result['ansible_facts']) diff --git a/lib/ansible/playbook/task.py b/lib/ansible/playbook/task.py index 17f1952e39c..825ee502691 100644 --- a/lib/ansible/playbook/task.py +++ b/lib/ansible/playbook/task.py @@ -260,6 +260,27 @@ class Task(Base, Conditional, Taggable, Become): break return templar.template(value, convert_bare=True) + def _post_validate_changed_when(self, attr, value, templar): + ''' + changed_when is evaluated after the execution of the task is complete, + and should not be templated during the regular post_validate step. + ''' + return value + + def _post_validate_failed_when(self, attr, value, templar): + ''' + failed_when is evaluated after the execution of the task is complete, + and should not be templated during the regular post_validate step. + ''' + return value + + def _post_validate_until(self, attr, value, templar): + ''' + until is evaluated after the execution of the task is complete, + and should not be templated during the regular post_validate step. 
+ ''' + return value + def get_vars(self): all_vars = dict() if self._block: From f2364ecf5f9abcb11112dc7fe7c7eaffb6703bd1 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Fri, 18 Dec 2015 08:10:57 -0800 Subject: [PATCH 0190/1113] Add a Fedora latest host into the mix --- test/utils/ansible-playbook_integration_runner/main.yml | 7 ++++++- .../roles/ansible_test_deps/tasks/main.yml | 4 ++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/test/utils/ansible-playbook_integration_runner/main.yml b/test/utils/ansible-playbook_integration_runner/main.yml index 5d15541490f..9bcda9c71ec 100644 --- a/test/utils/ansible-playbook_integration_runner/main.yml +++ b/test/utils/ansible-playbook_integration_runner/main.yml @@ -22,7 +22,12 @@ image: "ami-96a818fe" ssh_user: "centos" platform: "centos-7-x86_64" - + - distribution: "Fedora" + version: "23" + image: "ami-518bfb3b" + ssh_user: "fedora" + platform: "fedora-23-x86_64" + tasks: - debug: var=ansible_version - include: ec2.yml diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml index 17198cdc41f..16bdde79a05 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml @@ -16,10 +16,10 @@ - name: Install RH epel yum: name="epel-release" state=installed sudo: true - when: ansible_os_family == 'RedHat' + when: ansible_distribution in ('CentOS', 'RedHat') - name: Install RH ansible dependencies - yum: name="{{ item }}" state=installed + package: name="{{ item }}" state=installed sudo: true with_items: - python-pip From 0c154e81f055e07c78acedc8ac310a8011ff8274 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Fri, 18 Dec 2015 11:30:14 -0500 Subject: [PATCH 0191/1113] Make integration tests run in parallel with async --- .../roles/run_integration/tasks/main.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml index 2114567d152..980d4a4d32b 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml @@ -10,11 +10,21 @@ register: results - shell: ". 
hacking/env-setup && cd test/integration && make {{ run_integration_make_target }}" + async: 3600 + poll: 0 + register: async_test_results sudo: true environment: TEST_FLAGS: "{{ run_integration_test_flags|default(lookup('env', 'TEST_FLAGS')) }}" CREDENTIALS_FILE: "{{ run_integration_credentials_file|default(lookup('env', 'CREDENTIALS_FILE')) }}" args: chdir: "{{ results.stdout }}/ansible" + +- name: poll for test results + async_status: + jid: "{{async_test_results.ansible_job_id}}" register: test_results + until: test_results.finished + retries: 360 + wait: 10 ignore_errors: true From 73a0153b8e3e26ac095e140f6ffa6f8a1d756ff6 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Fri, 18 Dec 2015 12:44:57 -0500 Subject: [PATCH 0192/1113] Fix typo in integration test runner role --- .../roles/run_integration/tasks/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml index 980d4a4d32b..3eba8285443 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml @@ -26,5 +26,5 @@ register: test_results until: test_results.finished retries: 360 - wait: 10 + delay: 10 ignore_errors: true From 5d798c2725475b045fb06b46cba08c39bfcfeda8 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Fri, 18 Dec 2015 12:14:03 -0500 Subject: [PATCH 0193/1113] added missing features to changelog --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 005171ec9a9..0a5e7e2b7c1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -371,6 +371,8 @@ allowed in future versions: explicitly. Leaving it unset will still use the same user and respect .ssh/config. This also means ansible_ssh_user can now return a None value. * environment variables passed to remote shells now default to 'controller' settings, with fallback to en_us.UTF8 which was the previous default. * ansible-pull now defaults to doing shallow checkouts with git, use `--full` to return to previous behaviour. +* random cows are more random +* when: now gets the registered var after the first iteration, making it possible to break out of item loops * Handling of undefined variables has changed. In most places they will now raise an error instead of silently injecting an empty string. Use the default filter if you want to approximate the old behaviour: ``` From 5dbd7c18a1011e5bc922731574815c22a80d5bc6 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Fri, 18 Dec 2015 13:57:58 -0500 Subject: [PATCH 0194/1113] added note about add_hosts --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0a5e7e2b7c1..17180993a2f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -370,6 +370,7 @@ allowed in future versions: * We do not ignore the explicitly set login user for ssh when it matches the 'current user' anymore, this allows overriding .ssh/config when it is set explicitly. Leaving it unset will still use the same user and respect .ssh/config. This also means ansible_ssh_user can now return a None value. * environment variables passed to remote shells now default to 'controller' settings, with fallback to en_us.UTF8 which was the previous default. +* add_hosts is much stricter about host name and will prevent invalid names from being added. 
* ansible-pull now defaults to doing shallow checkouts with git, use `--full` to return to previous behaviour. * random cows are more random * when: now gets the registered var after the first iteration, making it possible to break out of item loops From 1cc83dd0d968c264c3da4982aa2a658d2e4aeb51 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Fri, 18 Dec 2015 11:50:06 -0800 Subject: [PATCH 0195/1113] Make tests that use kennethreitz retry. --- test/integration/roles/test_get_url/tasks/main.yml | 9 +++++++++ test/integration/roles/test_uri/tasks/main.yml | 9 +++++++++ 2 files changed, 18 insertions(+) diff --git a/test/integration/roles/test_get_url/tasks/main.yml b/test/integration/roles/test_get_url/tasks/main.yml index d7885f0905e..cbf3b345f18 100644 --- a/test/integration/roles/test_get_url/tasks/main.yml +++ b/test/integration/roles/test_get_url/tasks/main.yml @@ -47,6 +47,12 @@ dest: "{{ output_dir }}/shouldnotexist.html" ignore_errors: True register: result + # kennethreitz having trouble staying up. Eventually need to install our own + # certs & web server to test this... also need to install and test it with + # a proxy so the complications are inevitable + until: "'read operation timed out' not in result.msg" + retries: 30 + delay: 10 - stat: path: "{{ output_dir }}/shouldnotexist.html" @@ -65,6 +71,9 @@ dest: "{{ output_dir }}/kreitz.html" validate_certs: no register: result + until: "'read operation timed out' not in result.msg" + retries: 30 + delay: 10 - stat: path: "{{ output_dir }}/kreitz.html" diff --git a/test/integration/roles/test_uri/tasks/main.yml b/test/integration/roles/test_uri/tasks/main.yml index 18229e6b7cf..9ce05938b62 100644 --- a/test/integration/roles/test_uri/tasks/main.yml +++ b/test/integration/roles/test_uri/tasks/main.yml @@ -98,6 +98,12 @@ dest: "{{ output_dir }}/shouldnotexist.html" ignore_errors: True register: result + # kennethreitz having trouble staying up. Eventually need to install our own + # certs & web server to test this... 
also need to install and test it with + # a proxy so the complications are inevitable + until: "'read operation timed out' not in result.msg" + retries: 30 + delay: 10 - stat: path: "{{ output_dir }}/shouldnotexist.html" @@ -121,6 +127,9 @@ dest: "{{ output_dir }}/kreitz.html" validate_certs: no register: result + until: "'read operation timed out' not in result.msg" + retries: 30 + delay: 10 - stat: path: "{{ output_dir }}/kreitz.html" From 02f65eaa805f39a15e35a813bcd6a1fdc24ade8c Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Fri, 18 Dec 2015 14:59:05 -0500 Subject: [PATCH 0196/1113] Make integration runner ec2 add_hosts use valid host names --- test/utils/ansible-playbook_integration_runner/ec2.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/utils/ansible-playbook_integration_runner/ec2.yml b/test/utils/ansible-playbook_integration_runner/ec2.yml index 59e15f0da1a..d4740d95708 100644 --- a/test/utils/ansible-playbook_integration_runner/ec2.yml +++ b/test/utils/ansible-playbook_integration_runner/ec2.yml @@ -30,7 +30,7 @@ - name: Add hosts group temporary inventory group with pem path add_host: - name: "{{ item.1.platform }} {{ ec2.results[item.0]['instances'][0]['public_ip'] }}" + name: "{{ item.1.platform }}-{{ ec2.results[item.0]['instances'][0]['public_ip'] }}" groups: dynamic_hosts ansible_ssh_host: "{{ ec2.results[item.0]['instances'][0]['public_ip'] }}" ansible_ssh_private_key_file: '{{ pem_path }}' From 0823a2c16f923bd950399dd879b5440356cb8411 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Fri, 18 Dec 2015 15:33:44 -0500 Subject: [PATCH 0197/1113] Removing update all for test deps, it didn't fix the problem --- .../roles/ansible_test_deps/tasks/main.yml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml index 16bdde79a05..234eb70f92a 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml @@ -55,12 +55,8 @@ - python-virtualenv when: ansible_os_family == 'Debian' -#- name: update ca certificates -# yum: name=ca-certificates state=latest -# when: ansible_os_family == 'RedHat' - -- name: update all rpm packages - yum: name=* state=latest +- name: update ca certificates + yum: name=ca-certificates state=latest when: ansible_os_family == 'RedHat' - name: Install ubuntu 12.04 ansible dependencies From 68fe3d856f3a58d4cf84053a803bb5e286d61773 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Fri, 18 Dec 2015 14:04:51 -0800 Subject: [PATCH 0198/1113] Fedora 23 needs to have python2 packages installed --- test/utils/ansible-playbook_integration_runner/main.yml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/test/utils/ansible-playbook_integration_runner/main.yml b/test/utils/ansible-playbook_integration_runner/main.yml index 9bcda9c71ec..8683ffd5440 100644 --- a/test/utils/ansible-playbook_integration_runner/main.yml +++ b/test/utils/ansible-playbook_integration_runner/main.yml @@ -33,6 +33,15 @@ - include: ec2.yml when: groups['dynamic_hosts'] is not defined +# Have to hardcode these per-slave. We can't even run setup yet so we can't +# introspect what they have. 
+- hosts: dynamic_hosts + sudo: true + tasks: + - name: Install packages that let setup and package manager modules run + raw: dnf install -y python2 python2-dnf libselinux-python + when: "{{ inventory_hostname }} == 'fedora-23-x86_64'" + - hosts: dynamic_hosts sudo: true vars: From ec60bfbb3f0b88d37b91a2deae2bf6b79a1091dc Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Fri, 18 Dec 2015 14:36:17 -0800 Subject: [PATCH 0199/1113] Ubuntu images with hvm ssd --- test/utils/ansible-playbook_integration_runner/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/utils/ansible-playbook_integration_runner/main.yml b/test/utils/ansible-playbook_integration_runner/main.yml index 8683ffd5440..b8942172bce 100644 --- a/test/utils/ansible-playbook_integration_runner/main.yml +++ b/test/utils/ansible-playbook_integration_runner/main.yml @@ -4,12 +4,12 @@ slaves: - distribution: "Ubuntu" version: "12.04" - image: "ami-2ccc7a44" + image: "ami-309ddf5a" ssh_user: "ubuntu" platform: "ubuntu-12.04-x86_64" - distribution: "Ubuntu" version: "14.04" - image: "ami-9a562df2" + image: "ami-d06632ba" ssh_user: "ubuntu" platform: "ubuntu-14.04-x86_64" - distribution: "CentOS" From 26e5bcdb39517e8247e59ac038db7dd641cbb7fa Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Fri, 18 Dec 2015 14:38:54 -0800 Subject: [PATCH 0200/1113] Bugfix the fedora 23 install task --- test/utils/ansible-playbook_integration_runner/main.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test/utils/ansible-playbook_integration_runner/main.yml b/test/utils/ansible-playbook_integration_runner/main.yml index b8942172bce..e82e0dea3f2 100644 --- a/test/utils/ansible-playbook_integration_runner/main.yml +++ b/test/utils/ansible-playbook_integration_runner/main.yml @@ -37,10 +37,11 @@ # introspect what they have. 
- hosts: dynamic_hosts sudo: true + gather_facts: False tasks: - name: Install packages that let setup and package manager modules run raw: dnf install -y python2 python2-dnf libselinux-python - when: "{{ inventory_hostname }} == 'fedora-23-x86_64'" + when: "'{{ inventory_hostname }}' == 'fedora-23-x86_64'" - hosts: dynamic_hosts sudo: true From 78dde62710bd63f931bce21cf4352994a5a36873 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Fri, 18 Dec 2015 15:14:38 -0800 Subject: [PATCH 0201/1113] What is going on here --- test/utils/ansible-playbook_integration_runner/ec2.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/utils/ansible-playbook_integration_runner/ec2.yml b/test/utils/ansible-playbook_integration_runner/ec2.yml index d4740d95708..c6971486ec3 100644 --- a/test/utils/ansible-playbook_integration_runner/ec2.yml +++ b/test/utils/ansible-playbook_integration_runner/ec2.yml @@ -28,6 +28,8 @@ - name: Wait a little longer for centos pause: seconds=20 +- debug: var=ec2.results + - name: Add hosts group temporary inventory group with pem path add_host: name: "{{ item.1.platform }}-{{ ec2.results[item.0]['instances'][0]['public_ip'] }}" From f7ed33378e234542950b992499e848a8284cc2fa Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Fri, 18 Dec 2015 15:42:41 -0800 Subject: [PATCH 0202/1113] Fix the fedora host detection --- test/utils/ansible-playbook_integration_runner/ec2.yml | 2 -- test/utils/ansible-playbook_integration_runner/main.yml | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/test/utils/ansible-playbook_integration_runner/ec2.yml b/test/utils/ansible-playbook_integration_runner/ec2.yml index c6971486ec3..d4740d95708 100644 --- a/test/utils/ansible-playbook_integration_runner/ec2.yml +++ b/test/utils/ansible-playbook_integration_runner/ec2.yml @@ -28,8 +28,6 @@ - name: Wait a little longer for centos pause: seconds=20 -- debug: var=ec2.results - - name: Add hosts group temporary inventory group with pem path add_host: name: "{{ item.1.platform }}-{{ ec2.results[item.0]['instances'][0]['public_ip'] }}" diff --git a/test/utils/ansible-playbook_integration_runner/main.yml b/test/utils/ansible-playbook_integration_runner/main.yml index e82e0dea3f2..4aa17d11c1f 100644 --- a/test/utils/ansible-playbook_integration_runner/main.yml +++ b/test/utils/ansible-playbook_integration_runner/main.yml @@ -41,7 +41,7 @@ tasks: - name: Install packages that let setup and package manager modules run raw: dnf install -y python2 python2-dnf libselinux-python - when: "'{{ inventory_hostname }}' == 'fedora-23-x86_64'" + when: "'fedora-23' in '{{ inventory_hostname }}'" - hosts: dynamic_hosts sudo: true From 3197eeaaa8d49c862fcb98165bcb254c74e10f4e Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Fri, 18 Dec 2015 22:16:49 -0800 Subject: [PATCH 0203/1113] update submodule refs --- lib/ansible/modules/core | 2 +- lib/ansible/modules/extras | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core index 9366dfb63e5..15c1c0cca79 160000 --- a/lib/ansible/modules/core +++ b/lib/ansible/modules/core @@ -1 +1 @@ -Subproject commit 9366dfb63e565c9e0901d714be8832fc89b275d6 +Subproject commit 15c1c0cca79196d4dde630db2a7eee90367051cc diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras index 06bdec0cac8..c6829752d85 160000 --- a/lib/ansible/modules/extras +++ b/lib/ansible/modules/extras @@ -1 +1 @@ -Subproject commit 06bdec0cac86ef2339e0b4d8a4616ee24619956f +Subproject commit 
c6829752d852398c255704cd5d7faa54342e143e From 07a00593066cb439f0b9aea4e815259cc8a2ec75 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Fri, 18 Dec 2015 22:23:25 -0800 Subject: [PATCH 0204/1113] update submodule ref for doc fix --- lib/ansible/modules/core | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core index 15c1c0cca79..fcb3397df79 160000 --- a/lib/ansible/modules/core +++ b/lib/ansible/modules/core @@ -1 +1 @@ -Subproject commit 15c1c0cca79196d4dde630db2a7eee90367051cc +Subproject commit fcb3397df7944ff15ea698b5717c06e8fc7d43ba From d2ad17e88f5f1bc2ed7282ec4322aaffd869834a Mon Sep 17 00:00:00 2001 From: Matt Clay Date: Sat, 19 Dec 2015 00:08:49 -0800 Subject: [PATCH 0205/1113] Fixed import typo for memcache module in tests. The typo caused the test for the memcached cache plugin to be skipped even when the necessary memcache python module was installed. --- test/units/plugins/cache/test_cache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/units/plugins/cache/test_cache.py b/test/units/plugins/cache/test_cache.py index 0547ba55bf0..cd82e1ef2c8 100644 --- a/test/units/plugins/cache/test_cache.py +++ b/test/units/plugins/cache/test_cache.py @@ -26,7 +26,7 @@ from ansible.plugins.cache.memory import CacheModule as MemoryCache HAVE_MEMCACHED = True try: - import memcached + import memcache except ImportError: HAVE_MEMCACHED = False else: From 6127a8585e8eaea159ed5fd91c3ddb61b2d25dc8 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Sat, 19 Dec 2015 11:45:59 -0500 Subject: [PATCH 0206/1113] removed invocation info as it is not no_log aware This was added in 1.9 and 2.0 tried to copy, but since it cannot obey no_log restrictions I commented it out. I did not remove as it is still very useful for module invocation debugging. --- lib/ansible/plugins/action/__init__.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py index e9b18651d66..c363a47ec32 100644 --- a/lib/ansible/plugins/action/__init__.py +++ b/lib/ansible/plugins/action/__init__.py @@ -82,13 +82,14 @@ class ActionBase(with_metaclass(ABCMeta, object)): * Module parameters. 
These are stored in self._task.args """ - # store the module invocation details into the results results = {} - if self._task.async == 0: - results['invocation'] = dict( - module_name = self._task.action, - module_args = self._task.args, - ) + # This does not respect no_log set by module args, left here for debugging module invocation + #if self._task.async == 0: + # # store the module invocation details into the results + # results['invocation'] = dict( + # module_name = self._task.action, + # module_args = self._task.args, + # ) return results def _configure_module(self, module_name, module_args, task_vars=None): From c63ae9948543a3f73ae17dc4eecae7b22fb62947 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sat, 19 Dec 2015 10:10:38 -0800 Subject: [PATCH 0207/1113] Make sure that yum is present on redhat family systems (makes things also work on fedora systems where dnf is the default) --- .../roles/ansible_test_deps/tasks/main.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml index 234eb70f92a..89f7382a1e4 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml @@ -37,6 +37,8 @@ - python-devel - libselinux-python - python-virtualenv + - yum + - yum-metadata-parser when: ansible_os_family == 'RedHat' - name: Install Debian ansible dependencies From 2936682f004d9d3fc349e31113607636e971b71b Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sat, 19 Dec 2015 11:09:20 -0800 Subject: [PATCH 0208/1113] Revert "removed invocation info as it is not no_log aware" This reverts commit 6127a8585e8eaea159ed5fd91c3ddb61b2d25dc8. --- lib/ansible/plugins/action/__init__.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py index c363a47ec32..e9b18651d66 100644 --- a/lib/ansible/plugins/action/__init__.py +++ b/lib/ansible/plugins/action/__init__.py @@ -82,14 +82,13 @@ class ActionBase(with_metaclass(ABCMeta, object)): * Module parameters. These are stored in self._task.args """ + # store the module invocation details into the results results = {} - # This does not respect no_log set by module args, left here for debugging module invocation - #if self._task.async == 0: - # # store the module invocation details into the results - # results['invocation'] = dict( - # module_name = self._task.action, - # module_args = self._task.args, - # ) + if self._task.async == 0: + results['invocation'] = dict( + module_name = self._task.action, + module_args = self._task.args, + ) return results def _configure_module(self, module_name, module_args, task_vars=None): From d32a885e98f9154f5c74afba482b4299a2e2be5e Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sat, 19 Dec 2015 11:24:59 -0800 Subject: [PATCH 0209/1113] Make return invocation information so that our sanitized copy will take precedence over what the executor knows. 
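As a rough illustration (hypothetical module and variable names), for a task such as:

    - some_module:
        name: appuser
        password: "{{ vault_db_password }}"
      register: result

where the module's argument_spec marks password with no_log=True, the invocation
data in the result is now built from self.params inside the module and passed
through remove_values(), so the no_log value comes back censored even when the
module calls fail_json(), rather than the unsanitized copy the executor would
otherwise have attached.
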
--- lib/ansible/module_utils/basic.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py index 62b8cadfd61..4870ed096dd 100644 --- a/lib/ansible/module_utils/basic.py +++ b/lib/ansible/module_utils/basic.py @@ -1431,7 +1431,6 @@ class AnsibleModule(object): self.log(msg, log_args=log_args) - def _set_cwd(self): try: cwd = os.getcwd() @@ -1524,6 +1523,8 @@ class AnsibleModule(object): self.add_path_info(kwargs) if not 'changed' in kwargs: kwargs['changed'] = False + if 'invocation' not in kwargs: + kwargs['invocation'] = self.params kwargs = remove_values(kwargs, self.no_log_values) self.do_cleanup_files() print(self.jsonify(kwargs)) @@ -1534,6 +1535,8 @@ class AnsibleModule(object): self.add_path_info(kwargs) assert 'msg' in kwargs, "implementation error -- msg to explain the error is required" kwargs['failed'] = True + if 'invocation' not in kwargs: + kwargs['invocation'] = self.params kwargs = remove_values(kwargs, self.no_log_values) self.do_cleanup_files() print(self.jsonify(kwargs)) From 51cca87d67823f4edfc4e05bf3e5a4070e494113 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sat, 19 Dec 2015 11:27:16 -0800 Subject: [PATCH 0210/1113] Also need redhat-rpm-config to compile pycrypto --- .../roles/ansible_test_deps/tasks/main.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml index 89f7382a1e4..de08126b82d 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml @@ -39,6 +39,7 @@ - python-virtualenv - yum - yum-metadata-parser + - redhat-rpm-config when: ansible_os_family == 'RedHat' - name: Install Debian ansible dependencies From 8ffc1fa838d7e984f4a99568021660cbbd243550 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sat, 19 Dec 2015 11:31:46 -0800 Subject: [PATCH 0211/1113] Comment to explain why we strip _ansible_notify specially --- lib/ansible/plugins/action/normal.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/ansible/plugins/action/normal.py b/lib/ansible/plugins/action/normal.py index bf93fdad2d7..f9b55e1ff57 100644 --- a/lib/ansible/plugins/action/normal.py +++ b/lib/ansible/plugins/action/normal.py @@ -28,11 +28,13 @@ class ActionModule(ActionBase): results = super(ActionModule, self).run(tmp, task_vars) results.update(self._execute_module(tmp=tmp, task_vars=task_vars)) - # Remove special fields from the result, which can only be set # internally by the executor engine. We do this only here in # the 'normal' action, as other action plugins may set this. - for field in ('ansible_notify',): + # + # We don't want modules to determine that running the module fires + # notify handlers. That's for the playbook to decide. 
+ for field in ('_ansible_notify',): if field in results: results.pop(field) From 224d5963361deb33107e5f38fd28a4d5197f931e Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sat, 19 Dec 2015 11:51:16 -0800 Subject: [PATCH 0212/1113] Remove args from get_name() as we can't tell if any of the args are no_log --- lib/ansible/playbook/task.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/lib/ansible/playbook/task.py b/lib/ansible/playbook/task.py index 825ee502691..fb757864745 100644 --- a/lib/ansible/playbook/task.py +++ b/lib/ansible/playbook/task.py @@ -107,11 +107,10 @@ class Task(Base, Conditional, Taggable, Become): elif self.name: return self.name else: - flattened_args = self._merge_kv(self.args) if self._role: - return "%s : %s %s" % (self._role.get_name(), self.action, flattened_args) + return "%s : %s" % (self._role.get_name(), self.action) else: - return "%s %s" % (self.action, flattened_args) + return "%s" % (self.action,) def _merge_kv(self, ds): if ds is None: From 9abef1a1d7e8df5e580e17ef4a54cec280fbc7dc Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sat, 19 Dec 2015 12:39:48 -0800 Subject: [PATCH 0213/1113] Troubleshooting has reduced us to this --- test/integration/roles/test_get_url/tasks/main.yml | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/test/integration/roles/test_get_url/tasks/main.yml b/test/integration/roles/test_get_url/tasks/main.yml index cbf3b345f18..54debc06d10 100644 --- a/test/integration/roles/test_get_url/tasks/main.yml +++ b/test/integration/roles/test_get_url/tasks/main.yml @@ -96,12 +96,22 @@ register: get_url_result ignore_errors: True +- name: TROUBLESHOOTING + shell: curl https://foo.sni.velox.ch/ > /var/tmp/velox.html + register: trouble + ignore_errors: True + when: "{{ python_has_ssl_context }}" + +- debug: var=trouble + when: "{{ python_has_ssl_context }}" + +- debug: var=get_url_result + when: "{{ python_has_ssl_context }}" + - command: "grep 'sent the following TLS server name indication extension' {{ output_dir}}/sni.html" register: data_result when: "{{ python_has_ssl_context }}" -# If distros start backporting SNI, can make a new conditional based on whether this works: -# python -c 'from ssl import SSLContext' - debug: var=get_url_result - name: Assert that SNI works with this python version assert: From e66c070e5c0d50f0a90fcd3b73044a6faeef7c81 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sat, 19 Dec 2015 13:00:58 -0800 Subject: [PATCH 0214/1113] Add package module to squash list --- lib/ansible/constants.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py index 7f74358dd5d..5df9602246a 100644 --- a/lib/ansible/constants.py +++ b/lib/ansible/constants.py @@ -201,7 +201,7 @@ DEFAULT_BECOME_ASK_PASS = get_config(p, 'privilege_escalation', 'become_ask_pa # the module takes both, bad things could happen. 
# In the future we should probably generalize this even further # (mapping of param: squash field) -DEFAULT_SQUASH_ACTIONS = get_config(p, DEFAULTS, 'squash_actions', 'ANSIBLE_SQUASH_ACTIONS', "apt, yum, pkgng, zypper, dnf", islist=True) +DEFAULT_SQUASH_ACTIONS = get_config(p, DEFAULTS, 'squash_actions', 'ANSIBLE_SQUASH_ACTIONS', "apt, dnf, package, pkgng, yum, zypper", islist=True) # paths DEFAULT_ACTION_PLUGIN_PATH = get_config(p, DEFAULTS, 'action_plugins', 'ANSIBLE_ACTION_PLUGINS', '~/.ansible/plugins/action:/usr/share/ansible/plugins/action', ispath=True) DEFAULT_CACHE_PLUGIN_PATH = get_config(p, DEFAULTS, 'cache_plugins', 'ANSIBLE_CACHE_PLUGINS', '~/.ansible/plugins/cache:/usr/share/ansible/plugins/cache', ispath=True) From bb2935549f38a83670baadb74041ef98902e0640 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Sat, 19 Dec 2015 16:14:56 -0500 Subject: [PATCH 0215/1113] corrected service detection in docker versions now if 1 == bash it falls back into tool detection --- lib/ansible/module_utils/facts.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/module_utils/facts.py b/lib/ansible/module_utils/facts.py index 94a5a11f726..796ebc92bdd 100644 --- a/lib/ansible/module_utils/facts.py +++ b/lib/ansible/module_utils/facts.py @@ -555,8 +555,8 @@ class Facts(object): if proc_1 is None: rc, proc_1, err = module.run_command("ps -p 1 -o comm|tail -n 1", use_unsafe_shell=True) - if proc_1 in ['init', '/sbin/init']: - # many systems return init, so this cannot be trusted + if proc_1 in ['init', '/sbin/init', 'bash']: + # many systems return init, so this cannot be trusted, bash is from docker proc_1 = None # if not init/None it should be an identifiable or custom init, so we are done! From e2d9f4e2f272c6010b0c00257aa695c1606e05ab Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sat, 19 Dec 2015 15:49:06 -0800 Subject: [PATCH 0216/1113] Fix unittests for return of invocation from fail_json and exit_json --- test/units/module_utils/basic/test_exit_json.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/test/units/module_utils/basic/test_exit_json.py b/test/units/module_utils/basic/test_exit_json.py index 66610ec3ed3..931447f8ab6 100644 --- a/test/units/module_utils/basic/test_exit_json.py +++ b/test/units/module_utils/basic/test_exit_json.py @@ -56,7 +56,7 @@ class TestAnsibleModuleExitJson(unittest.TestCase): else: self.assertEquals(ctx.exception.code, 0) return_val = json.loads(self.fake_stream.getvalue()) - self.assertEquals(return_val, dict(changed=False)) + self.assertEquals(return_val, dict(changed=False, invocation={})) def test_exit_json_args_exits(self): with self.assertRaises(SystemExit) as ctx: @@ -67,7 +67,7 @@ class TestAnsibleModuleExitJson(unittest.TestCase): else: self.assertEquals(ctx.exception.code, 0) return_val = json.loads(self.fake_stream.getvalue()) - self.assertEquals(return_val, dict(msg="message", changed=False)) + self.assertEquals(return_val, dict(msg="message", changed=False, invocation={})) def test_fail_json_exits(self): with self.assertRaises(SystemExit) as ctx: @@ -78,13 +78,13 @@ class TestAnsibleModuleExitJson(unittest.TestCase): else: self.assertEquals(ctx.exception.code, 1) return_val = json.loads(self.fake_stream.getvalue()) - self.assertEquals(return_val, dict(msg="message", failed=True)) + self.assertEquals(return_val, dict(msg="message", failed=True, invocation={})) def test_exit_json_proper_changed(self): with self.assertRaises(SystemExit) as ctx: 
self.module.exit_json(changed=True, msg='success') return_val = json.loads(self.fake_stream.getvalue()) - self.assertEquals(return_val, dict(changed=True, msg='success')) + self.assertEquals(return_val, dict(changed=True, msg='success', invocation={})) @unittest.skipIf(sys.version_info[0] >= 3, "Python 3 is not supported on targets (yet)") class TestAnsibleModuleExitValuesRemoved(unittest.TestCase): @@ -94,19 +94,22 @@ class TestAnsibleModuleExitValuesRemoved(unittest.TestCase): dict(one=1, pwd='$ecret k3y', url='https://username:password12345@foo.com/login/', not_secret='following the leader', msg='here'), dict(one=1, pwd=OMIT, url='https://username:password12345@foo.com/login/', - not_secret='following the leader', changed=False, msg='here') + not_secret='following the leader', changed=False, msg='here', + invocation=dict(password=OMIT, token=None, username='person')), ), (dict(username='person', password='password12345'), dict(one=1, pwd='$ecret k3y', url='https://username:password12345@foo.com/login/', not_secret='following the leader', msg='here'), dict(one=1, pwd='$ecret k3y', url='https://username:********@foo.com/login/', - not_secret='following the leader', changed=False, msg='here') + not_secret='following the leader', changed=False, msg='here', + invocation=dict(password=OMIT, token=None, username='person')), ), (dict(username='person', password='$ecret k3y'), dict(one=1, pwd='$ecret k3y', url='https://username:$ecret k3y@foo.com/login/', not_secret='following the leader', msg='here'), dict(one=1, pwd=OMIT, url='https://username:********@foo.com/login/', - not_secret='following the leader', changed=False, msg='here') + not_secret='following the leader', changed=False, msg='here', + invocation=dict(password=OMIT, token=None, username='person')), ), ) From 3ec0104128103c4c37c117b5ef4548733245bcf4 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Sat, 19 Dec 2015 12:49:06 -0500 Subject: [PATCH 0217/1113] Fixing bugs in conditional testing with until and some integration runner tweaks --- lib/ansible/executor/task_executor.py | 8 ++--- lib/ansible/playbook/conditional.py | 36 +++++++++---------- lib/ansible/playbook/task.py | 2 +- .../main.yml | 2 +- .../roles/ansible_test_deps/tasks/main.yml | 1 + .../roles/run_integration/tasks/main.yml | 17 ++++----- 6 files changed, 34 insertions(+), 32 deletions(-) diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py index b0a5157a525..c8b6fa179bc 100644 --- a/lib/ansible/executor/task_executor.py +++ b/lib/ansible/executor/task_executor.py @@ -35,7 +35,7 @@ from ansible.template import Templar from ansible.utils.encrypt import key_for_hostname from ansible.utils.listify import listify_lookup_plugin_terms from ansible.utils.unicode import to_unicode -from ansible.vars.unsafe_proxy import UnsafeProxy +from ansible.vars.unsafe_proxy import UnsafeProxy, wrap_var try: from __main__ import display @@ -406,7 +406,7 @@ class TaskExecutor: # update the local copy of vars with the registered value, if specified, # or any facts which may have been generated by the module execution if self._task.register: - vars_copy[self._task.register] = result + vars_copy[self._task.register] = wrap_var(result.copy()) if self._task.async > 0: # the async_wrapper module returns dumped JSON via its stdout @@ -453,7 +453,7 @@ class TaskExecutor: if attempt < retries - 1: cond = Conditional(loader=self._loader) - cond.when = self._task.until + cond.when = [ self._task.until ] if cond.evaluate_conditional(templar, vars_copy): break 
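The hunk above wraps the task's `until` expression in a list and re-evaluates it through Conditional after each attempt, against the freshly registered result. Stripped of the surrounding machinery, the retry contract looks roughly like this (a simplified sketch that needs Jinja2 installed; the function names are illustrative, not the real TaskExecutor API):

    from jinja2 import Environment

    def run_with_until(run_once, until_expr, retries):
        # Render the expression the same way a bare conditional is handled:
        # wrap it in an if/else template and compare the rendered text.
        template = Environment().from_string(
            "{%% if %s %%}True{%% else %%}False{%% endif %%}" % until_expr)
        result = {}
        for _attempt in range(retries):
            result = run_once()
            if template.render(result=result).strip() == "True":
                break
        return result

    # e.g. keep retrying until the registered result says it finished
    final = run_with_until(lambda: {"rc": 0, "finished": 1},
                           "result.finished == 1", retries=3)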
@@ -466,7 +466,7 @@ class TaskExecutor: # do the final update of the local variables here, for both registered # values and any facts which may have been created if self._task.register: - variables[self._task.register] = result + variables[self._task.register] = wrap_var(result) if 'ansible_facts' in result: variables.update(result['ansible_facts']) diff --git a/lib/ansible/playbook/conditional.py b/lib/ansible/playbook/conditional.py index fc178e2fa1d..c8c6a9359ec 100644 --- a/lib/ansible/playbook/conditional.py +++ b/lib/ansible/playbook/conditional.py @@ -22,7 +22,7 @@ __metaclass__ = type from jinja2.exceptions import UndefinedError from ansible.compat.six import text_type -from ansible.errors import AnsibleError +from ansible.errors import AnsibleError, AnsibleUndefinedVariable from ansible.playbook.attribute import FieldAttribute from ansible.template import Templar @@ -89,16 +89,22 @@ class Conditional: # make sure the templar is using the variables specifed to this method templar.set_available_variables(variables=all_vars) - conditional = templar.template(conditional) - if not isinstance(conditional, basestring) or conditional == "": - return conditional - - # a Jinja2 evaluation that results in something Python can eval! - presented = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % conditional - conditional = templar.template(presented, fail_on_undefined=False) - - val = conditional.strip() - if val == presented: + try: + conditional = templar.template(conditional) + if not isinstance(conditional, text_type) or conditional == "": + return conditional + + # a Jinja2 evaluation that results in something Python can eval! + presented = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % conditional + conditional = templar.template(presented) + val = conditional.strip() + if val == "True": + return True + elif val == "False": + return False + else: + raise AnsibleError("unable to evaluate conditional: %s" % original) + except (AnsibleUndefinedVariable, UndefinedError) as e: # the templating failed, meaning most likely a # variable was undefined. 
If we happened to be # looking for an undefined variable, return True, @@ -108,11 +114,5 @@ class Conditional: elif "is defined" in original: return False else: - raise AnsibleError("error while evaluating conditional: %s (%s)" % (original, presented)) - elif val == "True": - return True - elif val == "False": - return False - else: - raise AnsibleError("unable to evaluate conditional: %s" % original) + raise AnsibleError("error while evaluating conditional (%s): %s" % (original, e)) diff --git a/lib/ansible/playbook/task.py b/lib/ansible/playbook/task.py index fb757864745..62b8cbc999b 100644 --- a/lib/ansible/playbook/task.py +++ b/lib/ansible/playbook/task.py @@ -82,7 +82,7 @@ class Task(Base, Conditional, Taggable, Become): _poll = FieldAttribute(isa='int') _register = FieldAttribute(isa='string') _retries = FieldAttribute(isa='int', default=3) - _until = FieldAttribute(isa='list') + _until = FieldAttribute(isa='string') def __init__(self, block=None, role=None, task_include=None): ''' constructors a task, without the Task.load classmethod, it will be pretty blank ''' diff --git a/test/utils/ansible-playbook_integration_runner/main.yml b/test/utils/ansible-playbook_integration_runner/main.yml index 4aa17d11c1f..27c4ae51b0d 100644 --- a/test/utils/ansible-playbook_integration_runner/main.yml +++ b/test/utils/ansible-playbook_integration_runner/main.yml @@ -74,4 +74,4 @@ - name: Fail shell: 'echo "{{ inventory_hostname }}, Failed" && exit 1' - when: "test_results.rc != 0" + when: "'rc' not in test_results or test_results.rc != 0" diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml index de08126b82d..d9611497e91 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml @@ -59,6 +59,7 @@ when: ansible_os_family == 'Debian' - name: update ca certificates + sudo: true yum: name=ca-certificates state=latest when: ansible_os_family == 'RedHat' diff --git a/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml index 3eba8285443..2d01999dbfd 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml @@ -6,10 +6,12 @@ - name: Get ansible source dir sudo: false - shell: "cd ~ && pwd" + shell: "cd ~/ansible && pwd" register: results -- shell: ". hacking/env-setup && cd test/integration && make {{ run_integration_make_target }}" +- shell: "ls -la && . 
hacking/env-setup && cd test/integration && make {{ run_integration_make_target }}" + args: + chdir: "{{ results.stdout }}" async: 3600 poll: 0 register: async_test_results @@ -17,14 +19,13 @@ environment: TEST_FLAGS: "{{ run_integration_test_flags|default(lookup('env', 'TEST_FLAGS')) }}" CREDENTIALS_FILE: "{{ run_integration_credentials_file|default(lookup('env', 'CREDENTIALS_FILE')) }}" - args: - chdir: "{{ results.stdout }}/ansible" - name: poll for test results - async_status: - jid: "{{async_test_results.ansible_job_id}}" + async_status: jid="{{async_test_results.ansible_job_id}}" register: test_results until: test_results.finished - retries: 360 - delay: 10 + retries: 120 + delay: 30 ignore_errors: true + +- debug: var=test_results From 3da312da9c1a92d5e8f47f3274338e4ef476b5a6 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sat, 19 Dec 2015 23:11:25 -0800 Subject: [PATCH 0218/1113] Switch from yum to package when installing sudo so that dnf is handled as well --- .../roles/ansible_test_deps/tasks/main.yml | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml index d9611497e91..832138527f9 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/ansible_test_deps/tasks/main.yml @@ -4,14 +4,8 @@ when: ansible_os_family == 'Debian' - name: Install sudo - yum: name=sudo state=installed + package: name=sudo state=installed ignore_errors: true - when: ansible_os_family == 'RedHat' - -- name: Install sudo - apt: name=sudo state=installed - ignore_errors: true - when: ansible_os_family == 'Debian' - name: Install RH epel yum: name="epel-release" state=installed From 6ec58bbd5f86bd4f2ca8aa6e7af78ee8ef28ee98 Mon Sep 17 00:00:00 2001 From: Branko Majic Date: Sun, 20 Dec 2015 14:19:20 +0100 Subject: [PATCH 0219/1113] Adding documentation for the 'dig' lookup (#13126). --- docsite/rst/playbooks_lookups.rst | 106 ++++++++++++++++++++++++++++++ 1 file changed, 106 insertions(+) diff --git a/docsite/rst/playbooks_lookups.rst b/docsite/rst/playbooks_lookups.rst index 25560e284d4..3c2222c337b 100644 --- a/docsite/rst/playbooks_lookups.rst +++ b/docsite/rst/playbooks_lookups.rst @@ -240,6 +240,112 @@ If you're not using 2.0 yet, you can do something similar with the credstash too debug: msg="Poor man's credstash lookup! {{ lookup('pipe', 'credstash -r us-west-1 get my-other-password') }}" +.. _dns_lookup: + +The DNS Lookup (dig) +```````````````````` +.. versionadded:: 1.9.0 + +.. warning:: This lookup depends on the `dnspython `_ + library. + +The ``dig`` lookup runs queries against DNS servers to retrieve DNS records for +a specific name (*FQDN* - fully qualified domain name). It is possible to lookup any DNS record in this manner. + +There is a couple of different syntaxes that can be used to specify what record +should be retrieved, and for which name. It is also possible to explicitly +specify the DNS server(s) to use for lookups. + +In its simplest form, the ``dig`` lookup plugin can be used to retrieve an IPv4 +address (DNS ``A`` record) associated with *FQDN*: + +.. note:: If you need to obtain the ``AAAA`` record (IPv6 address), you must + specify the record type explicitly. Syntax for specifying the record + type is described below. + +.. 
note:: The trailing dot in most of the examples listed is purely optional, + but is specified for completeness/correctness sake. + +:: + + - debug: msg="The IPv4 address for example.com. is {{ lookup('dig', 'example.com.')}}" + +In addition to (default) ``A`` record, it is also possible to specify a different +record type that should be queried. This can be done by either passing-in +additional parameter of format ``qtype=TYPE`` to the ``dig`` lookup, or by +appending ``/TYPE`` to the *FQDN* being queried. For example:: + + - debug: msg="The TXT record for gmail.com. is {{ lookup('dig', 'gmail.com.', 'qtype=TXT') }}" + - debug: msg="The TXT record for gmail.com. is {{ lookup('dig', 'gmail.com./TXT') }}" + +If multiple values are associated with the requested record, the results will be +returned as a comma-separated list. In such cases you may want to pass option +``wantlist=True`` to the plugin, which will result in the record values being +returned as a list over which you can iterate later on:: + + - debug: msg="One of the MX records for gmail.com. is {{ item }}" + with_items: "{{ lookup('dig', 'gmail.com./MX', wantlist=True) }}" + +In case of reverse DNS lookups (``PTR`` records), you can also use a convenience +syntax of format ``IP_ADDRESS/PTR``. The following three lines would produce the +same output:: + + - debug: msg="Reverse DNS for 8.8.8.8 is {{ lookup('dig', '8.8.8.8/PTR') }}" + - debug: msg="Reverse DNS for 8.8.8.8 is {{ lookup('dig', '8.8.8.8.in-addr.arpa./PTR') }}" + - debug: msg="Reverse DNS for 8.8.8.8 is {{ lookup('dig', '8.8.8.8.in-addr.arpa.', 'qtype=PTR') }}" + +By default, the lookup will rely on system-wide configured DNS servers for +performing the query. It is also possible to explicitly specify DNS servers to +query using the ``@DNS_SERVER_1,DNS_SERVER_2,...,DNS_SERVER_N`` notation. This +needs to be passed-in as an additional parameter to the lookup. For example:: + + - debug: msg="Querying 8.8.8.8 for IPv4 address for example.com. produces {{ lookup('dig', 'example.com', '@8.8.8.8') }}" + +In some cases the DNS records may hold a more complex data structure, or it may +be useful to obtain the results in a form of a dictionary for future +processing. The ``dig`` lookup supports parsing of a number of such records, +with the result being returned as a dictionary. This way it is possible to +easily access such nested data. This return format can be requested by +passing-in the ``flat=0`` option to the lookup. For example:: + + - debug: msg="XMPP service for gmail.com. is available at {{ item.target }} on port {{ item.port }}" + with_items: "{{ lookup('dig', '_xmpp-server._tcp.gmail.com./SRV', 'flat=0', wantlist=True) }}" + +Take note that due to the way Ansible lookups work, you must pass the +``wantlist=True`` argument to the lookup, otherwise Ansible will report errors. + +Currently the dictionary results are supported for the following records: + +.. note:: *ALL* is not a record per-se, merely the listed fields are available + for any record results you retrieve in the form of a dictionary. 
+ +========== ============================================================================= +Record Fields +---------- ----------------------------------------------------------------------------- +*ALL* owner, ttl, type +A address +AAAA address +CNAME target +DNAME target +DLV algorithm, digest_type, key_tag, digest +DNSKEY flags, algorithm, protocol, key +DS algorithm, digest_type, key_tag, digest +HINFO cpu, os +LOC latitude, longitude, altitude, size, horizontal_precision, vertical_precision +MX preference, exchange +NAPTR order, preference, flags, service, regexp, replacement +NS target +NSEC3PARAM algorithm, flags, iterations, salt +PTR target +RP mbox, txt +SOA mname, rname, serial, refresh, retry, expire, minimum +SPF strings +SRV priority, weight, port, target +SSHFP algorithm, fp_type, fingerprint +TLSA usage, selector, mtype, cert +TXT strings +========== ============================================================================= + .. _more_lookups: More Lookups From b90506341ac77c4885efe754ae401b90b0f61a7f Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sun, 20 Dec 2015 08:06:26 -0800 Subject: [PATCH 0220/1113] Fixes for tests that assumed yum as package manager for systems that have dnf --- .../roles/ec2_elb_instance_setup/tasks/main.yml | 7 ++++++- .../roles/setup_postgresql_db/tasks/main.yml | 8 ++++---- test/integration/roles/test_apt/tasks/main.yml | 1 - .../test_docker/tasks/docker-setup-rht.yml | 17 ++++++++--------- .../roles/test_unarchive/tasks/main.yml | 4 ++++ test/integration/roles/test_yum/tasks/main.yml | 2 ++ 6 files changed, 24 insertions(+), 15 deletions(-) diff --git a/test/integration/roles/ec2_elb_instance_setup/tasks/main.yml b/test/integration/roles/ec2_elb_instance_setup/tasks/main.yml index 341392b00c7..79584893ed8 100644 --- a/test/integration/roles/ec2_elb_instance_setup/tasks/main.yml +++ b/test/integration/roles/ec2_elb_instance_setup/tasks/main.yml @@ -5,7 +5,12 @@ # install apache on the ec2 instances - name: install apache on new ec2 instances - yum: name=httpd + package: name=httpd + when: ansible_os_family == 'RedHat' + +- name: install apache on new ec2 instances + package: name=apache + when: ansible_os_family == 'Debian' - name: start and enable apache service: name=httpd state=started enabled=yes diff --git a/test/integration/roles/setup_postgresql_db/tasks/main.yml b/test/integration/roles/setup_postgresql_db/tasks/main.yml index fbcc9cab725..c25318a2adc 100644 --- a/test/integration/roles/setup_postgresql_db/tasks/main.yml +++ b/test/integration/roles/setup_postgresql_db/tasks/main.yml @@ -9,9 +9,9 @@ # Make sure we start fresh - name: remove rpm dependencies for postgresql test - yum: name={{ item }} state=absent + package: name={{ item }} state=absent with_items: postgresql_packages - when: ansible_pkg_mgr == 'yum' + when: ansible_os_family == "RedHat" - name: remove dpkg dependencies for postgresql test apt: name={{ item }} state=absent @@ -35,9 +35,9 @@ when: ansible_os_family == "Debian" - name: install rpm dependencies for postgresql test - yum: name={{ item }} state=latest + package: name={{ item }} state=latest with_items: postgresql_packages - when: ansible_pkg_mgr == 'yum' + when: ansible_os_family == "RedHat" - name: install dpkg dependencies for postgresql test apt: name={{ item }} state=latest diff --git a/test/integration/roles/test_apt/tasks/main.yml b/test/integration/roles/test_apt/tasks/main.yml index 8976087371d..552b543d2d3 100644 --- a/test/integration/roles/test_apt/tasks/main.yml +++ 
b/test/integration/roles/test_apt/tasks/main.yml @@ -1,4 +1,3 @@ -# test code for the yum module # (c) 2014, James Tanner # This file is part of Ansible diff --git a/test/integration/roles/test_docker/tasks/docker-setup-rht.yml b/test/integration/roles/test_docker/tasks/docker-setup-rht.yml index 3ba234ecffc..c25821c3be0 100644 --- a/test/integration/roles/test_docker/tasks/docker-setup-rht.yml +++ b/test/integration/roles/test_docker/tasks/docker-setup-rht.yml @@ -1,18 +1,17 @@ -- name: Install docker packages (yum) - yum: +- name: Install docker packages (rht family) + package: state: present name: docker-io,docker-registry,python-docker-py,nginx -- name: Install netcat - yum: +- name: Install netcat (Fedora) + package: state: present name: nmap-ncat - # RHEL7 as well... - when: ansible_distribution == 'Fedora' + when: ansible_distribution == 'Fedora' or (ansible_os_family == 'RedHat' and ansible_distribution_version|version_compare('>=', 7)) -- name: Install netcat - yum: +- name: Install netcat (RHEL) + package: state: present name: nc - when: ansible_distribution != 'Fedora' + when: ansible_distribution != 'Fedora' and (ansible_os_family == 'RedHat' and ansible_distribution_version|version_compare('<', 7)) diff --git a/test/integration/roles/test_unarchive/tasks/main.yml b/test/integration/roles/test_unarchive/tasks/main.yml index c26d3aeb101..e4f438e5256 100644 --- a/test/integration/roles/test_unarchive/tasks/main.yml +++ b/test/integration/roles/test_unarchive/tasks/main.yml @@ -21,6 +21,10 @@ yum: name=zip state=latest when: ansible_pkg_mgr == 'yum' +- name: Ensure zip is present to create test archive (dnf) + dnf: name=zip state=latest + when: ansible_pkg_mgr == 'dnf' + - name: Ensure zip is present to create test archive (apt) apt: name=zip state=latest when: ansible_pkg_mgr == 'apt' diff --git a/test/integration/roles/test_yum/tasks/main.yml b/test/integration/roles/test_yum/tasks/main.yml index 5df887ae9f9..b17af6b465b 100644 --- a/test/integration/roles/test_yum/tasks/main.yml +++ b/test/integration/roles/test_yum/tasks/main.yml @@ -16,6 +16,8 @@ # You should have received a copy of the GNU General Public License # along with Ansible. If not, see . +# Note: We install the yum package onto Fedora so that this will work on dnf systems +# We want to test that for people who don't want to upgrade their systems. 
- include: 'yum.yml' when: ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux', 'Fedora'] From 5fef2c429763db8d088a20c97320936ee06e7fc8 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sun, 20 Dec 2015 09:11:53 -0800 Subject: [PATCH 0221/1113] Try updating the centos7 image to a newer version (trying to resolve issue being unable to connect to some webservers) --- test/utils/ansible-playbook_integration_runner/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/utils/ansible-playbook_integration_runner/main.yml b/test/utils/ansible-playbook_integration_runner/main.yml index 27c4ae51b0d..f1bd26b7ead 100644 --- a/test/utils/ansible-playbook_integration_runner/main.yml +++ b/test/utils/ansible-playbook_integration_runner/main.yml @@ -19,7 +19,7 @@ platform: "centos-6.5-x86_64" - distribution: "CentOS" version: "7" - image: "ami-96a818fe" + image: "ami-61bbf104" ssh_user: "centos" platform: "centos-7-x86_64" - distribution: "Fedora" From 6ae04c1e4f698629610030a74f5bb5fc501f5a1e Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Sun, 20 Dec 2015 12:37:24 -0500 Subject: [PATCH 0222/1113] Fix logic in PlayIterator when inserting tasks during rescue/always Because the fail_state is potentially non-zero in these block sections, the prior logic led to included tasks not being inserted at all. Related issue: #13605 --- lib/ansible/executor/play_iterator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/executor/play_iterator.py b/lib/ansible/executor/play_iterator.py index 795eed2a8c1..534f216c30a 100644 --- a/lib/ansible/executor/play_iterator.py +++ b/lib/ansible/executor/play_iterator.py @@ -397,7 +397,7 @@ class PlayIterator: def _insert_tasks_into_state(self, state, task_list): # if we've failed at all, or if the task list is empty, just return the current state - if state.fail_state != self.FAILED_NONE or not task_list: + if state.fail_state != self.FAILED_NONE and state.run_state not in (self.ITERATING_RESCUE, self.ITERATING_ALWAYS) or not task_list: return state if state.run_state == self.ITERATING_TASKS: From 8d7892cc7b7a95c4efda003c8b187d1bc4875a5f Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sun, 20 Dec 2015 10:13:33 -0800 Subject: [PATCH 0223/1113] Done troubleshooting Revert "Troubleshooting has reduced us to this" This reverts commit 9abef1a1d7e8df5e580e17ef4a54cec280fbc7dc. 
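One detail worth calling out from the play_iterator.py fix above: the new guard reads `A and B or C`, which Python parses as `(A and B) or C`. An empty task list therefore still short-circuits the insert, while a non-zero fail_state no longer blocks insertion when the block is already iterating its rescue or always section. A standalone sketch of that guard, with hypothetical constant names rather than the real PlayIterator attributes:

    FAILED_NONE = 0
    ITERATING_RESCUE, ITERATING_ALWAYS = "rescue", "always"

    def skip_insert(fail_state, run_state, task_list):
        # (failed and not in rescue/always) or (nothing to insert)
        return (fail_state != FAILED_NONE
                and run_state not in (ITERATING_RESCUE, ITERATING_ALWAYS)) or not task_list

    assert skip_insert(1, "tasks", ["t"])        # failed outside rescue/always: keep state as-is
    assert not skip_insert(1, "rescue", ["t"])   # failed, but rescue tasks must still be inserted
    assert skip_insert(0, "tasks", [])           # nothing to insert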
--- test/integration/roles/test_get_url/tasks/main.yml | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/test/integration/roles/test_get_url/tasks/main.yml b/test/integration/roles/test_get_url/tasks/main.yml index 54debc06d10..cbf3b345f18 100644 --- a/test/integration/roles/test_get_url/tasks/main.yml +++ b/test/integration/roles/test_get_url/tasks/main.yml @@ -96,22 +96,12 @@ register: get_url_result ignore_errors: True -- name: TROUBLESHOOTING - shell: curl https://foo.sni.velox.ch/ > /var/tmp/velox.html - register: trouble - ignore_errors: True - when: "{{ python_has_ssl_context }}" - -- debug: var=trouble - when: "{{ python_has_ssl_context }}" - -- debug: var=get_url_result - when: "{{ python_has_ssl_context }}" - - command: "grep 'sent the following TLS server name indication extension' {{ output_dir}}/sni.html" register: data_result when: "{{ python_has_ssl_context }}" +# If distros start backporting SNI, can make a new conditional based on whether this works: +# python -c 'from ssl import SSLContext' - debug: var=get_url_result - name: Assert that SNI works with this python version assert: From 3792a586b51ce598ab71bfab004a4bd97f004101 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sun, 20 Dec 2015 11:33:42 -0800 Subject: [PATCH 0224/1113] Since the velox test server seems to be dropping using iptables to drop requests from aws, test via a different website instead --- .../roles/test_get_url/tasks/main.yml | 45 +++++++++++++++---- 1 file changed, 37 insertions(+), 8 deletions(-) diff --git a/test/integration/roles/test_get_url/tasks/main.yml b/test/integration/roles/test_get_url/tasks/main.yml index cbf3b345f18..a0ff3797a87 100644 --- a/test/integration/roles/test_get_url/tasks/main.yml +++ b/test/integration/roles/test_get_url/tasks/main.yml @@ -85,23 +85,51 @@ - "result.changed == true" - "stat_result.stat.exists == true" -# SNI Tests -# SNI is only built into the stdlib from python-2.7.9 onwards +# At the moment, AWS can't make an https request to velox.ch... connection +# timed out. So we'll use a different test until/unless the problem is resolved +## SNI Tests +## SNI is only built into the stdlib from python-2.7.9 onwards +#- name: Test that SNI works +# get_url: +# # A test site that returns a page with information on what SNI information +# # the client sent. A failure would have the string: did not send a TLS server name indication extension +# url: 'https://foo.sni.velox.ch/' +# dest: "{{ output_dir }}/sni.html" +# register: get_url_result +# ignore_errors: True +# +#- command: "grep 'sent the following TLS server name indication extension' {{ output_dir}}/sni.html" +# register: data_result +# when: "{{ python_has_ssl_context }}" +# +#- debug: var=get_url_result +#- name: Assert that SNI works with this python version +# assert: +# that: +# - 'data_result.rc == 0' +# - '"failed" not in get_url_result' +# when: "{{ python_has_ssl_context }}" +# +## If the client doesn't support SNI then get_url should have failed with a certificate mismatch +#- name: Assert that hostname verification failed because SNI is not supported on this version of python +# assert: +# that: +# - 'get_url_result["failed"]' +# when: "{{ not python_has_ssl_context }}" + +# These tests are just side effects of how the site is hosted. It's not +# specifically a test site. So the tests may break due to the hosting changing - name: Test that SNI works get_url: - # A test site that returns a page with information on what SNI information - # the client sent. 
A failure would have the string: did not send a TLS server name indication extension - url: 'https://foo.sni.velox.ch/' + url: 'https://www.mnot.net/blog/2014/05/09/if_you_can_read_this_youre_sniing' dest: "{{ output_dir }}/sni.html" register: get_url_result ignore_errors: True -- command: "grep 'sent the following TLS server name indication extension' {{ output_dir}}/sni.html" +- command: "grep '
If You Can Read This, You're SNIing
' {{ output_dir}}/sni.html" register: data_result when: "{{ python_has_ssl_context }}" -# If distros start backporting SNI, can make a new conditional based on whether this works: -# python -c 'from ssl import SSLContext' - debug: var=get_url_result - name: Assert that SNI works with this python version assert: @@ -116,3 +144,4 @@ that: - 'get_url_result["failed"]' when: "{{ not python_has_ssl_context }}" +# End hacky SNI test section From 21ca0ce1ce12eb4e487d479abdc355972d2c2309 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sun, 20 Dec 2015 11:46:49 -0800 Subject: [PATCH 0225/1113] Fix test playbook syntax --- test/integration/roles/test_get_url/tasks/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/roles/test_get_url/tasks/main.yml b/test/integration/roles/test_get_url/tasks/main.yml index a0ff3797a87..630287c9871 100644 --- a/test/integration/roles/test_get_url/tasks/main.yml +++ b/test/integration/roles/test_get_url/tasks/main.yml @@ -126,7 +126,7 @@ register: get_url_result ignore_errors: True -- command: "grep '
If You Can Read This, You're SNIing
' {{ output_dir}}/sni.html" +- command: "grep '
If You Can Read This, You\\'re SNIing
' {{ output_dir}}/sni.html" register: data_result when: "{{ python_has_ssl_context }}" From 6963955cb4a607c8548669136cb266c25d9f9ceb Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Sun, 20 Dec 2015 11:51:32 -0800 Subject: [PATCH 0226/1113] And change the task a little more since different shlex versions are handling the quotes differently --- test/integration/roles/test_get_url/tasks/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/roles/test_get_url/tasks/main.yml b/test/integration/roles/test_get_url/tasks/main.yml index 630287c9871..9ed0549ec47 100644 --- a/test/integration/roles/test_get_url/tasks/main.yml +++ b/test/integration/roles/test_get_url/tasks/main.yml @@ -126,7 +126,7 @@ register: get_url_result ignore_errors: True -- command: "grep '
If You Can Read This, You\\'re SNIing
' {{ output_dir}}/sni.html" +- command: "grep '
If You Can Read This, You.re SNIing
' {{ output_dir}}/sni.html" register: data_result when: "{{ python_has_ssl_context }}" From 8b5e5538285f03c360807fd1e09c00a77d52bd94 Mon Sep 17 00:00:00 2001 From: Rene Moser Date: Mon, 16 Nov 2015 18:38:27 +0100 Subject: [PATCH 0227/1113] cloudstack: add tests for cs_volume --- test/integration/cloudstack.yml | 1 + .../roles/test_cs_volume/defaults/main.yml | 6 + .../roles/test_cs_volume/meta/main.yml | 3 + .../roles/test_cs_volume/tasks/main.yml | 183 ++++++++++++++++++ 4 files changed, 193 insertions(+) create mode 100644 test/integration/roles/test_cs_volume/defaults/main.yml create mode 100644 test/integration/roles/test_cs_volume/meta/main.yml create mode 100644 test/integration/roles/test_cs_volume/tasks/main.yml diff --git a/test/integration/cloudstack.yml b/test/integration/cloudstack.yml index 93ba7876d8c..3ad4ed08349 100644 --- a/test/integration/cloudstack.yml +++ b/test/integration/cloudstack.yml @@ -22,3 +22,4 @@ - { role: test_cs_account, tags: test_cs_account } - { role: test_cs_firewall, tags: test_cs_firewall } - { role: test_cs_loadbalancer_rule, tags: test_cs_loadbalancer_rule } + - { role: test_cs_volume, tags: test_cs_volume } diff --git a/test/integration/roles/test_cs_volume/defaults/main.yml b/test/integration/roles/test_cs_volume/defaults/main.yml new file mode 100644 index 00000000000..546469f33fc --- /dev/null +++ b/test/integration/roles/test_cs_volume/defaults/main.yml @@ -0,0 +1,6 @@ +--- +test_cs_instance_1: "{{ cs_resource_prefix }}-vm1" +test_cs_instance_2: "{{ cs_resource_prefix }}-vm2" +test_cs_instance_template: CentOS 5.3(64-bit) no GUI (Simulator) +test_cs_instance_offering_1: Small Instance +test_cs_disk_offering_1: Small diff --git a/test/integration/roles/test_cs_volume/meta/main.yml b/test/integration/roles/test_cs_volume/meta/main.yml new file mode 100644 index 00000000000..03e38bd4f7a --- /dev/null +++ b/test/integration/roles/test_cs_volume/meta/main.yml @@ -0,0 +1,3 @@ +--- +dependencies: + - test_cs_common diff --git a/test/integration/roles/test_cs_volume/tasks/main.yml b/test/integration/roles/test_cs_volume/tasks/main.yml new file mode 100644 index 00000000000..fa1f1026028 --- /dev/null +++ b/test/integration/roles/test_cs_volume/tasks/main.yml @@ -0,0 +1,183 @@ +--- +- name: setup + cs_volume: name={{ cs_resource_prefix }}_vol state=absent + register: vol +- name: verify setup + assert: + that: + - vol|success + +- name: setup instance 1 + cs_instance: + name: "{{ test_cs_instance_1 }}" + template: "{{ test_cs_instance_template }}" + service_offering: "{{ test_cs_instance_offering_1 }}" + register: instance +- name: verify create instance + assert: + that: + - instance|success + +- name: setup instance 2 + cs_instance: + name: "{{ test_cs_instance_2 }}" + template: "{{ test_cs_instance_template }}" + service_offering: "{{ test_cs_instance_offering_1 }}" + register: instance +- name: verify create instance + assert: + that: + - instance|success + +- name: test fail if missing name + action: cs_volume + register: vol + ignore_errors: true +- name: verify results of fail if missing name + assert: + that: + - vol|failed + - "vol.msg == 'missing required arguments: name'" + +- name: test create volume + cs_volume: + name: "{{ cs_resource_prefix }}_vol" + disk_offering: "{{ test_cs_disk_offering_1 }}" + register: vol +- name: verify results test create volume + assert: + that: + - vol|changed + - vol.name == "{{ cs_resource_prefix }}_vol" + +- name: test create volume idempotence + cs_volume: + name: "{{ cs_resource_prefix }}_vol" + 
disk_offering: "{{ test_cs_disk_offering_1 }}" + register: vol +- name: verify results test create volume idempotence + assert: + that: + - not vol|changed + - vol.name == "{{ cs_resource_prefix }}_vol" + +- name: test attach volume + cs_volume: + name: "{{ cs_resource_prefix }}_vol" + vm: "{{ test_cs_instance_1 }}" + state: attached + register: vol +- name: verify results test attach volume + assert: + that: + - vol|changed + - vol.name == "{{ cs_resource_prefix }}_vol" + - vol.vm == "{{ test_cs_instance_1 }}" + - vol.attached is defined + +- name: test attach volume idempotence + cs_volume: + name: "{{ cs_resource_prefix }}_vol" + vm: "{{ test_cs_instance_1 }}" + state: attached + register: vol +- name: verify results test attach volume idempotence + assert: + that: + - not vol|changed + - vol.name == "{{ cs_resource_prefix }}_vol" + - vol.vm == "{{ test_cs_instance_1 }}" + - vol.attached is defined + +- name: test attach attached volume to another vm + cs_volume: + name: "{{ cs_resource_prefix }}_vol" + vm: "{{ test_cs_instance_2 }}" + state: attached + register: vol +- name: verify results test attach attached volume to another vm + assert: + that: + - vol|changed + - vol.name == "{{ cs_resource_prefix }}_vol" + - vol.vm == "{{ test_cs_instance_2 }}" + - vol.attached is defined + +- name: test attach attached volume to another vm idempotence + cs_volume: + name: "{{ cs_resource_prefix }}_vol" + vm: "{{ test_cs_instance_2 }}" + state: attached + register: vol +- name: verify results test attach attached volume to another vm idempotence + assert: + that: + - not vol|changed + - vol.name == "{{ cs_resource_prefix }}_vol" + - vol.vm == "{{ test_cs_instance_2 }}" + - vol.attached is defined + +- name: test detach volume + cs_volume: + name: "{{ cs_resource_prefix }}_vol" + state: detached + register: vol +- name: verify results test detach volume + assert: + that: + - vol|changed + - vol.name == "{{ cs_resource_prefix }}_vol" + - vol.attached is undefined + +- name: test detach volume idempotence + cs_volume: + name: "{{ cs_resource_prefix }}_vol" + state: detached + register: vol +- name: verify results test detach volume idempotence + assert: + that: + - not vol|changed + - vol.name == "{{ cs_resource_prefix }}_vol" + - vol.attached is undefined + +- name: test delete volume + cs_volume: + name: "{{ cs_resource_prefix }}_vol" + state: absent + register: vol +- name: verify results test create volume + assert: + that: + - vol|changed + - vol.name == "{{ cs_resource_prefix }}_vol" + +- name: test delete volume idempotence + cs_volume: + name: "{{ cs_resource_prefix }}_vol" + state: absent + register: vol +- name: verify results test delete volume idempotence + assert: + that: + - not vol|changed + +- name: cleanup instance 1 + cs_instance: + name: "{{ test_cs_instance_1 }}" + state: absent + register: instance +- name: verify create instance + assert: + that: + - instance|success + +- name: cleanup instance 2 + cs_instance: + name: "{{ test_cs_instance_2 }}" + state: absent + register: instance +- name: verify create instance + assert: + that: + - instance|success From b0525da8c879faf837fba026c908bf0521e7629f Mon Sep 17 00:00:00 2001 From: Rene Moser Date: Sat, 5 Dec 2015 15:19:43 +0100 Subject: [PATCH 0228/1113] cloudstack: cs_volume: add tests for volume resize See https://github.com/ansible/ansible-modules-extras/pull/1333 --- .../roles/test_cs_volume/defaults/main.yml | 2 +- .../roles/test_cs_volume/tasks/main.yml | 32 +++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) 
diff --git a/test/integration/roles/test_cs_volume/defaults/main.yml b/test/integration/roles/test_cs_volume/defaults/main.yml index 546469f33fc..311a99bbe82 100644 --- a/test/integration/roles/test_cs_volume/defaults/main.yml +++ b/test/integration/roles/test_cs_volume/defaults/main.yml @@ -3,4 +3,4 @@ test_cs_instance_1: "{{ cs_resource_prefix }}-vm1" test_cs_instance_2: "{{ cs_resource_prefix }}-vm2" test_cs_instance_template: CentOS 5.3(64-bit) no GUI (Simulator) test_cs_instance_offering_1: Small Instance -test_cs_disk_offering_1: Small +test_cs_disk_offering_1: Custom diff --git a/test/integration/roles/test_cs_volume/tasks/main.yml b/test/integration/roles/test_cs_volume/tasks/main.yml index fa1f1026028..ae57039cee8 100644 --- a/test/integration/roles/test_cs_volume/tasks/main.yml +++ b/test/integration/roles/test_cs_volume/tasks/main.yml @@ -43,22 +43,54 @@ cs_volume: name: "{{ cs_resource_prefix }}_vol" disk_offering: "{{ test_cs_disk_offering_1 }}" + size: 20 register: vol - name: verify results test create volume assert: that: - vol|changed + - vol.size == 20 * 1024 ** 3 - vol.name == "{{ cs_resource_prefix }}_vol" - name: test create volume idempotence cs_volume: name: "{{ cs_resource_prefix }}_vol" disk_offering: "{{ test_cs_disk_offering_1 }}" + size: 20 register: vol - name: verify results test create volume idempotence assert: that: - not vol|changed + - vol.size == 20 * 1024 ** 3 + - vol.name == "{{ cs_resource_prefix }}_vol" + +- name: test shrink volume + cs_volume: + name: "{{ cs_resource_prefix }}_vol" + disk_offering: "{{ test_cs_disk_offering_1 }}" + size: 10 + shrink_ok: yes + register: vol +- name: verify results test create volume + assert: + that: + - vol|changed + - vol.size == 10 * 1024 ** 3 + - vol.name == "{{ cs_resource_prefix }}_vol" + +- name: test shrink volume idempotence + cs_volume: + name: "{{ cs_resource_prefix }}_vol" + disk_offering: "{{ test_cs_disk_offering_1 }}" + size: 10 + shrink_ok: yes + register: vol +- name: verify results test create volume + assert: + that: + - not vol|changed + - vol.size == 10 * 1024 ** 3 - vol.name == "{{ cs_resource_prefix }}_vol" - name: test attach volume From b85b92ecdd03429fd84d384a495fbb5894da9ab0 Mon Sep 17 00:00:00 2001 From: Rene Moser Date: Mon, 14 Dec 2015 14:23:44 +0100 Subject: [PATCH 0229/1113] cloudstack: test_cs_instance: more integration tests cloudstack: extend test_cs_instance addressing recovering cloudstack: test_cs_instance: add tests for using display_name as indentifier. 
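These cloudstack roles lean heavily on one pattern: every state change is applied twice, with the second run asserted to report no change. Expressed as a toy in plain Python rather than playbook YAML (purely illustrative, not how the cs_* modules are implemented), the contract the assertions encode is:

    def ensure_volume(state, name, size):
        # Toy "module": returns (changed, new_state) like an idempotent task would.
        if state.get(name) == size:
            return False, state
        new_state = dict(state)
        new_state[name] = size
        return True, new_state

    state = {}
    changed_first, state = ensure_volume(state, "vol1", 20)
    changed_second, state = ensure_volume(state, "vol1", 20)
    assert changed_first and not changed_second  # second run of the same task is a no-op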
--- .../roles/test_cs_instance/tasks/absent.yml | 20 ++ .../tasks/absent_display_name.yml | 43 +++++ .../roles/test_cs_instance/tasks/cleanup.yml | 6 - .../roles/test_cs_instance/tasks/main.yml | 5 + .../roles/test_cs_instance/tasks/present.yml | 37 +++- .../tasks/present_display_name.yml | 176 ++++++++++++++++++ .../roles/test_cs_instance/tasks/setup.yml | 8 - 7 files changed, 272 insertions(+), 23 deletions(-) create mode 100644 test/integration/roles/test_cs_instance/tasks/absent_display_name.yml create mode 100644 test/integration/roles/test_cs_instance/tasks/present_display_name.yml diff --git a/test/integration/roles/test_cs_instance/tasks/absent.yml b/test/integration/roles/test_cs_instance/tasks/absent.yml index bafb3ec9e76..eeab47a61d7 100644 --- a/test/integration/roles/test_cs_instance/tasks/absent.yml +++ b/test/integration/roles/test_cs_instance/tasks/absent.yml @@ -21,3 +21,23 @@ that: - instance|success - not instance|changed + +- name: test recover to stopped state and update a deleted instance + cs_instance: + name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + service_offering: "{{ test_cs_instance_offering_1 }}" + state: stopped + register: instance +- name: verify test recover to stopped state and update a deleted instance + assert: + that: + - instance|success + - instance|changed + - instance.state == "Stopped" + - instance.service_offering == "{{ test_cs_instance_offering_1 }}" + +# force expunge, only works with admin permissions +- cs_instance: + name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + state: expunged + failed_when: false diff --git a/test/integration/roles/test_cs_instance/tasks/absent_display_name.yml b/test/integration/roles/test_cs_instance/tasks/absent_display_name.yml new file mode 100644 index 00000000000..35fa6dff34f --- /dev/null +++ b/test/integration/roles/test_cs_instance/tasks/absent_display_name.yml @@ -0,0 +1,43 @@ +--- +- name: test destroy instance with display_name + cs_instance: + display_name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + state: absent + register: instance +- name: verify destroy instance with display_name + assert: + that: + - instance|success + - instance|changed + - instance.state == "Destroyed" + +- name: test destroy instance with display_name idempotence + cs_instance: + display_name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + state: absent + register: instance +- name: verify destroy instance with display_name idempotence + assert: + that: + - instance|success + - not instance|changed + +- name: test recover to stopped state and update a deleted instance with display_name + cs_instance: + display_name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + service_offering: "{{ test_cs_instance_offering_1 }}" + state: stopped + register: instance +- name: verify test recover to stopped state and update a deleted instance with display_name + assert: + that: + - instance|success + - instance|changed + - instance.state == "Stopped" + - instance.service_offering == "{{ test_cs_instance_offering_1 }}" + +# force expunge, only works with admin permissions +- cs_instance: + display_name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + state: expunged + failed_when: false diff --git a/test/integration/roles/test_cs_instance/tasks/cleanup.yml b/test/integration/roles/test_cs_instance/tasks/cleanup.yml index 63192dbd608..e6b6550dfa1 100644 --- a/test/integration/roles/test_cs_instance/tasks/cleanup.yml +++ b/test/integration/roles/test_cs_instance/tasks/cleanup.yml @@ -28,9 +28,3 
@@ assert: that: - sg|success - -# force expunge, only works with admin permissions -- cs_instance: - name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" - state: expunged - failed_when: false diff --git a/test/integration/roles/test_cs_instance/tasks/main.yml b/test/integration/roles/test_cs_instance/tasks/main.yml index d1a67e17810..d6475a47664 100644 --- a/test/integration/roles/test_cs_instance/tasks/main.yml +++ b/test/integration/roles/test_cs_instance/tasks/main.yml @@ -4,3 +4,8 @@ - include: tags.yml - include: absent.yml - include: cleanup.yml + +- include: setup.yml +- include: present_display_name.yml +- include: absent_display_name.yml +- include: cleanup.yml diff --git a/test/integration/roles/test_cs_instance/tasks/present.yml b/test/integration/roles/test_cs_instance/tasks/present.yml index 10242a57fd2..ad3d391ef9c 100644 --- a/test/integration/roles/test_cs_instance/tasks/present.yml +++ b/test/integration/roles/test_cs_instance/tasks/present.yml @@ -1,4 +1,12 @@ --- +- name: setup instance to be absent + cs_instance: name={{ cs_resource_prefix }}-vm-{{ instance_number }} state=absent + register: instance +- name: verify instance to be absent + assert: + that: + - instance|success + - name: test create instance cs_instance: name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" @@ -21,7 +29,6 @@ - instance.ssh_key == "{{ cs_resource_prefix }}-sshkey" - not instance.tags - - name: test create instance idempotence cs_instance: name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" @@ -44,7 +51,6 @@ - instance.ssh_key == "{{ cs_resource_prefix }}-sshkey" - not instance.tags - - name: test running instance not updated cs_instance: name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" @@ -60,7 +66,6 @@ - instance.service_offering == "{{ test_cs_instance_offering_1 }}" - instance.state == "Running" - - name: test stopping instance cs_instance: name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" @@ -76,7 +81,6 @@ - instance.service_offering == "{{ test_cs_instance_offering_1 }}" - instance.state == "Stopped" - - name: test stopping instance idempotence cs_instance: name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" @@ -89,7 +93,6 @@ - not instance|changed - instance.state == "Stopped" - - name: test updating stopped instance cs_instance: name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" @@ -106,7 +109,6 @@ - instance.service_offering == "{{ test_cs_instance_offering_2 }}" - instance.state == "Stopped" - - name: test starting instance cs_instance: name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" @@ -122,7 +124,6 @@ - instance.service_offering == "{{ test_cs_instance_offering_2 }}" - instance.state == "Running" - - name: test starting instance idempotence cs_instance: name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" @@ -133,6 +134,9 @@ that: - instance|success - not instance|changed + - instance.name == "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + - instance.display_name == "{{ cs_resource_prefix }}-display-{{ instance_number }}" + - instance.service_offering == "{{ test_cs_instance_offering_2 }}" - instance.state == "Running" - name: test force update running instance @@ -147,7 +151,7 @@ - instance|success - instance|changed - instance.name == "{{ cs_resource_prefix }}-vm-{{ instance_number }}" - - instance.display_name == "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + - instance.display_name == "{{ cs_resource_prefix }}-display-{{ instance_number }}" - instance.service_offering == "{{ 
test_cs_instance_offering_1 }}" - instance.state == "Running" @@ -163,6 +167,21 @@ - instance|success - not instance|changed - instance.name == "{{ cs_resource_prefix }}-vm-{{ instance_number }}" - - instance.display_name == "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + - instance.display_name == "{{ cs_resource_prefix }}-display-{{ instance_number }}" - instance.service_offering == "{{ test_cs_instance_offering_1 }}" - instance.state == "Running" + +- name: test restore instance + cs_instance: + name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + template: "{{ test_cs_instance_template }}" + state: restored + register: instance +- name: verify restore instance + assert: + that: + - instance|success + - instance|changed + - instance.name == "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + - instance.display_name == "{{ cs_resource_prefix }}-display-{{ instance_number }}" + - instance.service_offering == "{{ test_cs_instance_offering_1 }}" diff --git a/test/integration/roles/test_cs_instance/tasks/present_display_name.yml b/test/integration/roles/test_cs_instance/tasks/present_display_name.yml new file mode 100644 index 00000000000..c1882149d9d --- /dev/null +++ b/test/integration/roles/test_cs_instance/tasks/present_display_name.yml @@ -0,0 +1,176 @@ +--- +- name: setup instance with display_name to be absent + cs_instance: display_name={{ cs_resource_prefix }}-vm-{{ instance_number }} state=absent + register: instance +- name: verify instance with display_name to be absent + assert: + that: + - instance|success + +- name: test create instance with display_name + cs_instance: + display_name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + template: "{{ test_cs_instance_template }}" + service_offering: "{{ test_cs_instance_offering_1 }}" + affinity_group: "{{ cs_resource_prefix }}-ag" + security_group: "{{ cs_resource_prefix }}-sg" + ssh_key: "{{ cs_resource_prefix }}-sshkey" + tags: [] + register: instance +- name: verify create instance with display_name + assert: + that: + - instance|success + - instance|changed + - instance.display_name == "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + - instance.service_offering == "{{ test_cs_instance_offering_1 }}" + - instance.state == "Running" + - instance.ssh_key == "{{ cs_resource_prefix }}-sshkey" + - not instance.tags + +- name: test create instance with display_name idempotence + cs_instance: + display_name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + template: "{{ test_cs_instance_template }}" + service_offering: "{{ test_cs_instance_offering_1 }}" + affinity_group: "{{ cs_resource_prefix }}-ag" + security_group: "{{ cs_resource_prefix }}-sg" + ssh_key: "{{ cs_resource_prefix }}-sshkey" + tags: [] + register: instance +- name: verify create instance with display_name idempotence + assert: + that: + - instance|success + - not instance|changed + - instance.display_name == "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + - instance.service_offering == "{{ test_cs_instance_offering_1 }}" + - instance.state == "Running" + - instance.ssh_key == "{{ cs_resource_prefix }}-sshkey" + - not instance.tags + +- name: test running instance with display_name not updated + cs_instance: + display_name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + service_offering: "{{ test_cs_instance_offering_2 }}" + register: instance +- name: verify running instance with display_name not updated + assert: + that: + - instance|success + - not instance|changed + - instance.display_name == "{{ cs_resource_prefix 
}}-vm-{{ instance_number }}" + - instance.service_offering == "{{ test_cs_instance_offering_1 }}" + - instance.state == "Running" + +- name: test stopping instance with display_name + cs_instance: + display_name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + state: stopped + register: instance +- name: verify stopping instance with display_name + assert: + that: + - instance|success + - instance|changed + - instance.display_name == "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + - instance.service_offering == "{{ test_cs_instance_offering_1 }}" + - instance.state == "Stopped" + +- name: test stopping instance with display_name idempotence + cs_instance: + display_name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + state: stopped + register: instance +- name: verify stopping instance idempotence + assert: + that: + - instance|success + - not instance|changed + - instance.state == "Stopped" + +- name: test updating stopped instance with display_name + cs_instance: + display_name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + service_offering: "{{ test_cs_instance_offering_2 }}" + register: instance +- name: verify updating stopped instance with display_name + assert: + that: + - instance|success + - instance|changed + - instance.display_name == "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + - instance.service_offering == "{{ test_cs_instance_offering_2 }}" + - instance.state == "Stopped" + +- name: test starting instance with display_name + cs_instance: + display_name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + state: started + register: instance +- name: verify starting instance with display_name + assert: + that: + - instance|success + - instance|changed + - instance.display_name == "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + - instance.service_offering == "{{ test_cs_instance_offering_2 }}" + - instance.state == "Running" + +- name: test starting instance with display_name idempotence + cs_instance: + display_name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + state: started + register: instance +- name: verify starting instance with display_name idempotence + assert: + that: + - instance|success + - not instance|changed + - instance.display_name == "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + - instance.service_offering == "{{ test_cs_instance_offering_2 }}" + - instance.state == "Running" + +- name: test force update running instance with display_name + cs_instance: + display_name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + service_offering: "{{ test_cs_instance_offering_1 }}" + force: true + register: instance +- name: verify force update running instance with display_name + assert: + that: + - instance|success + - instance|changed + - instance.display_name == "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + - instance.service_offering == "{{ test_cs_instance_offering_1 }}" + - instance.state == "Running" + +- name: test force update running instance with display_name idempotence + cs_instance: + display_name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + service_offering: "{{ test_cs_instance_offering_1 }}" + force: true + register: instance +- name: verify force update running instance with display_name idempotence + assert: + that: + - instance|success + - not instance|changed + - instance.display_name == "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + - instance.service_offering == "{{ test_cs_instance_offering_1 }}" + - instance.state == "Running" + +- name: test restore 
instance with display_name + cs_instance: + display_name: "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + template: "{{ test_cs_instance_template }}" + state: restored + register: instance +- name: verify restore instance with display_name + assert: + that: + - instance|success + - instance|changed + - instance.display_name == "{{ cs_resource_prefix }}-vm-{{ instance_number }}" + - instance.service_offering == "{{ test_cs_instance_offering_1 }}" diff --git a/test/integration/roles/test_cs_instance/tasks/setup.yml b/test/integration/roles/test_cs_instance/tasks/setup.yml index 32f3ff13e24..0039ce8f1be 100644 --- a/test/integration/roles/test_cs_instance/tasks/setup.yml +++ b/test/integration/roles/test_cs_instance/tasks/setup.yml @@ -22,11 +22,3 @@ assert: that: - sg|success - -- name: setup instance to be absent - cs_instance: name={{ cs_resource_prefix }}-vm-{{ instance_number }} state=absent - register: instance -- name: verify instance to be absent - assert: - that: - - instance|success From 3a57d9472c6788ce6fbb700108fbc776527fc3df Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Sun, 20 Dec 2015 17:55:39 -0500 Subject: [PATCH 0230/1113] Save output of integration test results to files we can archive --- .../roles/run_integration/tasks/main.yml | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml index 2d01999dbfd..f67f088246c 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml @@ -28,4 +28,14 @@ delay: 30 ignore_errors: true -- debug: var=test_results +- name: save stdout test results for each host + local_action: copy + args: + dest: "{{sync_dir}}/{{inventory_hostname}}.stdout_results.txt" + content: "{{test_results.stdout}}" + +- name: save stderr test results for each host + local_action: copy + args: + dest: "{{sync_dir}}/{{inventory_hostname}}.stderr_results.txt" + content: "{{test_results.stderr}}" From 54455a06e55756b31493fd25b1871146c8fe6ab2 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Sun, 20 Dec 2015 21:32:37 -0500 Subject: [PATCH 0231/1113] Disable docker test for Fedora, due to broken packaging --- test/integration/destructive.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/destructive.yml b/test/integration/destructive.yml index 626124d14f1..3e8cca385e6 100644 --- a/test/integration/destructive.yml +++ b/test/integration/destructive.yml @@ -17,5 +17,5 @@ - { role: test_mysql_db, tags: test_mysql_db} - { role: test_mysql_user, tags: test_mysql_user} - { role: test_mysql_variables, tags: test_mysql_variables} - - { role: test_docker, tags: test_docker} + - { role: test_docker, tags: test_docker, when: ansible_distribution != "Fedora" } - { role: test_zypper, tags: test_zypper} From a4674906c60da6035345c2bbe89983b5a6e3b69d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Yannig=20Perr=C3=A9?= Date: Mon, 21 Dec 2015 13:01:58 -0500 Subject: [PATCH 0232/1113] Merge role params into variables separately from other variables Fixes #13617 --- lib/ansible/playbook/role/__init__.py | 6 ++++++ lib/ansible/vars/__init__.py | 1 + 2 files changed, 7 insertions(+) diff --git a/lib/ansible/playbook/role/__init__.py b/lib/ansible/playbook/role/__init__.py index f308954f528..ce82573dc03 100644 --- 
a/lib/ansible/playbook/role/__init__.py +++ b/lib/ansible/playbook/role/__init__.py @@ -265,6 +265,12 @@ class Role(Base, Become, Conditional, Taggable): inherited_vars = combine_vars(inherited_vars, parent._role_params) return inherited_vars + def get_role_params(self): + params = {} + for dep in self.get_all_dependencies(): + params = combine_vars(params, dep._role_params) + return params + def get_vars(self, dep_chain=[], include_params=True): all_vars = self.get_inherited_vars(dep_chain, include_params=include_params) diff --git a/lib/ansible/vars/__init__.py b/lib/ansible/vars/__init__.py index 1184ec50492..699333a5896 100644 --- a/lib/ansible/vars/__init__.py +++ b/lib/ansible/vars/__init__.py @@ -308,6 +308,7 @@ class VariableManager: if not C.DEFAULT_PRIVATE_ROLE_VARS: for role in play.get_roles(): + all_vars = combine_vars(all_vars, role.get_role_params()) all_vars = combine_vars(all_vars, role.get_vars(include_params=False)) if task: From 593d80c63d408012550850eb06d85387588cee3b Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Mon, 21 Dec 2015 13:14:51 -0500 Subject: [PATCH 0233/1113] role search path clarified --- docsite/rst/playbooks_roles.rst | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/docsite/rst/playbooks_roles.rst b/docsite/rst/playbooks_roles.rst index c6c01db5d48..2e1173acda9 100644 --- a/docsite/rst/playbooks_roles.rst +++ b/docsite/rst/playbooks_roles.rst @@ -191,11 +191,8 @@ This designates the following behaviors, for each role 'x': - If roles/x/handlers/main.yml exists, handlers listed therein will be added to the play - If roles/x/vars/main.yml exists, variables listed therein will be added to the play - If roles/x/meta/main.yml exists, any role dependencies listed therein will be added to the list of roles (1.3 and later) -- Any copy tasks can reference files in roles/x/files/ without having to path them relatively or absolutely -- Any script tasks can reference scripts in roles/x/files/ without having to path them relatively or absolutely -- Any template tasks can reference files in roles/x/templates/ without having to path them relatively or absolutely -- Any include tasks can reference files in roles/x/tasks/ without having to path them relatively or absolutely - +- Any copy, script, template or include tasks (in the role) can reference files in roles/x/files/ without having to path them relatively or absolutely + In Ansible 1.4 and later you can configure a roles_path to search for roles. Use this to check all of your common roles out to one location, and share them easily between multiple playbook projects. See :doc:`intro_configuration` for details about how to set this up in ansible.cfg. From 75e94e0cba538c9ed532374b219c45e91fd89db8 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Mon, 21 Dec 2015 13:06:48 -0500 Subject: [PATCH 0234/1113] allow for non standard hostnames * Changed parse_addresses to throw exceptions instead of passing None * Switched callers to trap and pass through the original values. 
* Added very verbose notice * Look at deprecating this and possibly validate at plugin instead fixes #13608 --- lib/ansible/inventory/__init__.py | 21 ++++++++++++--------- lib/ansible/inventory/ini.py | 11 +++++++---- lib/ansible/parsing/utils/addresses.py | 22 +++++++++++----------- lib/ansible/plugins/action/add_host.py | 10 +++++++--- test/units/parsing/test_addresses.py | 14 ++++++++++++-- 5 files changed, 49 insertions(+), 29 deletions(-) diff --git a/lib/ansible/inventory/__init__.py b/lib/ansible/inventory/__init__.py index 95e193f381a..095118e50eb 100644 --- a/lib/ansible/inventory/__init__.py +++ b/lib/ansible/inventory/__init__.py @@ -109,7 +109,12 @@ class Inventory(object): pass elif isinstance(host_list, list): for h in host_list: - (host, port) = parse_address(h, allow_ranges=False) + try: + (host, port) = parse_address(h, allow_ranges=False) + except AnsibleError as e: + display.vvv("Unable to parse address from hostname, leaving unchanged: %s" % to_string(e)) + host = h + port = None all.add_host(Host(host, port)) elif self._loader.path_exists(host_list): #TODO: switch this to a plugin loader and a 'condition' per plugin on which it should be tried, restoring 'inventory pllugins' @@ -228,15 +233,13 @@ class Inventory(object): # If it doesn't, it could still be a single pattern. This accounts for # non-separator uses of colons: IPv6 addresses and [x:y] host ranges. else: - (base, port) = parse_address(pattern, allow_ranges=True) - if base: + try: + (base, port) = parse_address(pattern, allow_ranges=True) patterns = [pattern] - - # The only other case we accept is a ':'-separated list of patterns. - # This mishandles IPv6 addresses, and is retained only for backwards - # compatibility. - - else: + except: + # The only other case we accept is a ':'-separated list of patterns. + # This mishandles IPv6 addresses, and is retained only for backwards + # compatibility. patterns = re.findall( r'''(?: # We want to match something comprising: [^\s:\[\]] # (anything other than whitespace or ':[]' diff --git a/lib/ansible/inventory/ini.py b/lib/ansible/inventory/ini.py index 537fde1ef9e..9224ef2d23d 100644 --- a/lib/ansible/inventory/ini.py +++ b/lib/ansible/inventory/ini.py @@ -23,7 +23,7 @@ import ast import re from ansible import constants as C -from ansible.errors import AnsibleError +from ansible.errors import AnsibleError, AnsibleParserError from ansible.inventory.host import Host from ansible.inventory.group import Group from ansible.inventory.expand_hosts import detect_range @@ -264,9 +264,12 @@ class InventoryParser(object): # Can the given hostpattern be parsed as a host with an optional port # specification? - (pattern, port) = parse_address(hostpattern, allow_ranges=True) - if not pattern: - self._raise_error("Can't parse '%s' as host[:port]" % hostpattern) + try: + (pattern, port) = parse_address(hostpattern, allow_ranges=True) + except: + # not a recognizable host pattern + pattern = hostpattern + port = None # Once we have separated the pattern, we expand it into list of one or # more hostnames, depending on whether it contains any [x:y] ranges. 
diff --git a/lib/ansible/parsing/utils/addresses.py b/lib/ansible/parsing/utils/addresses.py index 387f05c627f..ebfd850ac6a 100644 --- a/lib/ansible/parsing/utils/addresses.py +++ b/lib/ansible/parsing/utils/addresses.py @@ -20,6 +20,7 @@ from __future__ import (absolute_import, division, print_function) __metaclass__ = type import re +from ansible.errors import AnsibleParserError, AnsibleError # Components that match a numeric or alphanumeric begin:end or begin:end:step # range expression inside square brackets. @@ -162,6 +163,7 @@ patterns = { $ '''.format(label=label), re.X|re.I|re.UNICODE ), + } def parse_address(address, allow_ranges=False): @@ -183,8 +185,8 @@ def parse_address(address, allow_ranges=False): # First, we extract the port number if one is specified. port = None - for type in ['bracketed_hostport', 'hostport']: - m = patterns[type].match(address) + for matching in ['bracketed_hostport', 'hostport']: + m = patterns[matching].match(address) if m: (address, port) = m.groups() port = int(port) @@ -194,22 +196,20 @@ def parse_address(address, allow_ranges=False): # numeric ranges, or a hostname with alphanumeric ranges. host = None - for type in ['ipv4', 'ipv6', 'hostname']: - m = patterns[type].match(address) + for matching in ['ipv4', 'ipv6', 'hostname']: + m = patterns[matching].match(address) if m: host = address continue # If it isn't any of the above, we don't understand it. - if not host: - return (None, None) - - # If we get to this point, we know that any included ranges are valid. If - # the caller is prepared to handle them, all is well. Otherwise we treat - # it as a parse failure. + raise AnsibleError("Not a valid network hostname: %s" % address) + # If we get to this point, we know that any included ranges are valid. + # If the caller is prepared to handle them, all is well. + # Otherwise we treat it as a parse failure. 
if not allow_ranges and '[' in host: - return (None, None) + raise AnsibleParserError("Detected range in host but was asked to ignore ranges") return (host, port) diff --git a/lib/ansible/plugins/action/add_host.py b/lib/ansible/plugins/action/add_host.py index 4bf43f14009..b3aec20437e 100644 --- a/lib/ansible/plugins/action/add_host.py +++ b/lib/ansible/plugins/action/add_host.py @@ -53,9 +53,13 @@ class ActionModule(ActionBase): new_name = self._task.args.get('name', self._task.args.get('hostname', None)) display.vv("creating host via 'add_host': hostname=%s" % new_name) - name, port = parse_address(new_name, allow_ranges=False) - if not name: - raise AnsibleError("Invalid inventory hostname: %s" % new_name) + try: + name, port = parse_address(new_name, allow_ranges=False) + except: + # not a parsable hostname, but might still be usable + name = new_name + port = None + if port: self._task.args['ansible_ssh_port'] = port diff --git a/test/units/parsing/test_addresses.py b/test/units/parsing/test_addresses.py index 870cbb0a14a..a688d0253bd 100644 --- a/test/units/parsing/test_addresses.py +++ b/test/units/parsing/test_addresses.py @@ -71,7 +71,12 @@ class TestParseAddress(unittest.TestCase): for t in self.tests: test = self.tests[t] - (host, port) = parse_address(t) + try: + (host, port) = parse_address(t) + except: + host = None + port = None + assert host == test[0] assert port == test[1] @@ -79,6 +84,11 @@ class TestParseAddress(unittest.TestCase): for t in self.range_tests: test = self.range_tests[t] - (host, port) = parse_address(t, allow_ranges=True) + try: + (host, port) = parse_address(t, allow_ranges=True) + except: + host = None + port = None + assert host == test[0] assert port == test[1] From 08b580decce79deac3c7c2d828d6a8ef9dd6e70c Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Mon, 21 Dec 2015 14:09:02 -0500 Subject: [PATCH 0235/1113] Parallelize make command for integration test runner Also adds a new var, used by the prepare_tests role, to prevent it from deleting the temp test directory at the start of each play to avoid any potential race conditions --- test/integration/roles/prepare_tests/tasks/main.yml | 1 + .../roles/run_integration/tasks/main.yml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/test/integration/roles/prepare_tests/tasks/main.yml b/test/integration/roles/prepare_tests/tasks/main.yml index 3641880baa1..7983ea52361 100644 --- a/test/integration/roles/prepare_tests/tasks/main.yml +++ b/test/integration/roles/prepare_tests/tasks/main.yml @@ -22,6 +22,7 @@ always_run: True tags: - prepare + when: clean_working_dir|default("yes")|bool - name: create the test directory file: name={{output_dir}} state=directory diff --git a/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml index f67f088246c..8a306a8ada4 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml @@ -9,7 +9,7 @@ shell: "cd ~/ansible && pwd" register: results -- shell: "ls -la && . hacking/env-setup && cd test/integration && make {{ run_integration_make_target }}" +- shell: "ls -la && . 
hacking/env-setup && cd test/integration && TEST_FLAGS='-e clean_working_dir=no' make -j4 {{ run_integration_make_target }}" args: chdir: "{{ results.stdout }}" async: 3600 From 6d6822e66e43658c01b68bab2ed897e0ef31c784 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Mon, 21 Dec 2015 14:37:17 -0500 Subject: [PATCH 0236/1113] Kick up the integration runner test image size --- test/utils/ansible-playbook_integration_runner/ec2.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/utils/ansible-playbook_integration_runner/ec2.yml b/test/utils/ansible-playbook_integration_runner/ec2.yml index d4740d95708..55619776d90 100644 --- a/test/utils/ansible-playbook_integration_runner/ec2.yml +++ b/test/utils/ansible-playbook_integration_runner/ec2.yml @@ -2,7 +2,7 @@ ec2: group_id: 'sg-07bb906d' # jenkins-slave_new count: 1 - instance_type: 'm3.medium' + instance_type: 'm3.large' image: '{{ item.image }}' wait: true region: 'us-east-1' From 45afa642c3a69d209fefd7debfb38df9d8b757fd Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Mon, 21 Dec 2015 15:48:58 -0500 Subject: [PATCH 0237/1113] Integration test runner tweaks --- test/utils/ansible-playbook_integration_runner/ec2.yml | 2 +- .../roles/run_integration/tasks/main.yml | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/test/utils/ansible-playbook_integration_runner/ec2.yml b/test/utils/ansible-playbook_integration_runner/ec2.yml index 55619776d90..8a48f0ce6e2 100644 --- a/test/utils/ansible-playbook_integration_runner/ec2.yml +++ b/test/utils/ansible-playbook_integration_runner/ec2.yml @@ -2,7 +2,7 @@ ec2: group_id: 'sg-07bb906d' # jenkins-slave_new count: 1 - instance_type: 'm3.large' + instance_type: 'm3.xlarge' image: '{{ item.image }}' wait: true region: 'us-east-1' diff --git a/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml index 8a306a8ada4..6b37d85c2e7 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml @@ -3,13 +3,14 @@ synchronize: src: "{{ sync_dir }}/" dest: "~/ansible" + no_log: true - name: Get ansible source dir sudo: false shell: "cd ~/ansible && pwd" register: results -- shell: "ls -la && . hacking/env-setup && cd test/integration && TEST_FLAGS='-e clean_working_dir=no' make -j4 {{ run_integration_make_target }}" +- shell: "ls -la && . 
hacking/env-setup && cd test/integration && TEST_FLAGS='-e clean_working_dir=no' make -j2 {{ run_integration_make_target }}" args: chdir: "{{ results.stdout }}" async: 3600 @@ -27,6 +28,7 @@ retries: 120 delay: 30 ignore_errors: true + no_log: true - name: save stdout test results for each host local_action: copy From 8119ea37afe5e94a1d98cec9fe7ae760b10a9adc Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Mon, 21 Dec 2015 15:55:16 -0500 Subject: [PATCH 0238/1113] Dropping instance size back down since we're not doing parallel builds --- test/utils/ansible-playbook_integration_runner/ec2.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/utils/ansible-playbook_integration_runner/ec2.yml b/test/utils/ansible-playbook_integration_runner/ec2.yml index 8a48f0ce6e2..55619776d90 100644 --- a/test/utils/ansible-playbook_integration_runner/ec2.yml +++ b/test/utils/ansible-playbook_integration_runner/ec2.yml @@ -2,7 +2,7 @@ ec2: group_id: 'sg-07bb906d' # jenkins-slave_new count: 1 - instance_type: 'm3.xlarge' + instance_type: 'm3.large' image: '{{ item.image }}' wait: true region: 'us-east-1' From d22bbbf52c08e03b63d6045768f3000531f875e9 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Mon, 21 Dec 2015 16:11:53 -0500 Subject: [PATCH 0239/1113] Actually disable parallel makes for integration runner --- .../roles/run_integration/tasks/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml b/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml index 6b37d85c2e7..a833c96558d 100644 --- a/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml +++ b/test/utils/ansible-playbook_integration_runner/roles/run_integration/tasks/main.yml @@ -10,7 +10,7 @@ shell: "cd ~/ansible && pwd" register: results -- shell: "ls -la && . hacking/env-setup && cd test/integration && TEST_FLAGS='-e clean_working_dir=no' make -j2 {{ run_integration_make_target }}" +- shell: "ls -la && . hacking/env-setup && cd test/integration && TEST_FLAGS='-e clean_working_dir=no' make {{ run_integration_make_target }}" args: chdir: "{{ results.stdout }}" async: 3600 From 0c013f592a31c06baac7aadf27d23598f6abe931 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Mon, 21 Dec 2015 13:52:41 -0800 Subject: [PATCH 0240/1113] Transform the command we pass to subprocess into a byte string in _low_level-exec_command --- lib/ansible/plugins/action/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py index e9b18651d66..e88a55a15cc 100644 --- a/lib/ansible/plugins/action/__init__.py +++ b/lib/ansible/plugins/action/__init__.py @@ -487,7 +487,8 @@ class ActionBase(with_metaclass(ABCMeta, object)): verbatim, then this won't work. May have to use some sort of replacement strategy (python3 could use surrogateescape) ''' - + # We may need to revisit this later. + cmd = to_bytes(cmd, errors='strict') if executable is not None: cmd = executable + ' -c ' + cmd From 0f4d1eb051ce4aa6863f6ec86b00b43ccc277c5a Mon Sep 17 00:00:00 2001 From: Andrew Gaffney Date: Fri, 18 Dec 2015 01:56:15 +0000 Subject: [PATCH 0241/1113] Add 'filtered' stdout callback plugin This plugin filters output for any task that is 'ok' or 'skipped'. It works by subclassing the 'default' stdout callback plugin and overriding certain functions. 
It will suppress display of the task banner until there is a 'changed' or 'failed' result or an unreachable host. --- lib/ansible/plugins/callback/filtered.py | 76 ++++++++++++++++++++++++ 1 file changed, 76 insertions(+) create mode 100644 lib/ansible/plugins/callback/filtered.py diff --git a/lib/ansible/plugins/callback/filtered.py b/lib/ansible/plugins/callback/filtered.py new file mode 100644 index 00000000000..094c37ed985 --- /dev/null +++ b/lib/ansible/plugins/callback/filtered.py @@ -0,0 +1,76 @@ +# (c) 2015, Andrew Gaffney +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see . + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +from ansible.plugins.callback.default import CallbackModule as CallbackModule_default + +class CallbackModule(CallbackModule_default): + + CALLBACK_VERSION = 2.0 + CALLBACK_TYPE = 'stdout' + CALLBACK_NAME = 'filtered' + + def __init__(self): + self.super_ref = super(CallbackModule, self) + self.super_ref.__init__() + self.last_task = None + self.shown_title = False + + def v2_playbook_on_task_start(self, task, is_conditional): + self.last_task = task + self.shown_title = False + + def display_task_banner(self): + if not self.shown_title: + self.super_ref.v2_playbook_on_task_start(self.last_task, None) + self.shown_title = True + + def v2_runner_on_failed(self, result, ignore_errors=False): + self.display_task_banner() + self.super_ref.v2_runner_on_failed(result, ignore_errors) + + def v2_runner_on_ok(self, result): + if result._result.get('changed', False): + self.display_task_banner() + self.super_ref.v2_runner_on_ok(result) + else: + pass + + def v2_runner_on_unreachable(self, result): + self.display_task_banner() + self.super_ref.v2_runner_on_unreachable(result) + + def v2_runner_on_skipped(self, result): + pass + + def v2_playbook_on_include(self, included_file): + pass + + def v2_playbook_item_on_ok(self, result): + self.display_task_banner() + self.super_ref.v2_playbook_item_on_ok(result) + + def v2_playbook_item_on_skipped(self, result): + pass + + def v2_playbook_item_on_failed(self, result): + self.display_task_banner() + self.super_ref.v2_playbook_item_on_failed(result) + From bbdfaf052209242fbd262860aeda81e59d694243 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Tue, 22 Dec 2015 00:24:35 -0500 Subject: [PATCH 0242/1113] move hostvars.vars to vars this fixes duplication under hostvars and exposes all vars in the vars dict which makes dynamic reference possible on 'non hostvars' --- lib/ansible/vars/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/vars/__init__.py b/lib/ansible/vars/__init__.py index 699333a5896..4135ff17687 100644 --- a/lib/ansible/vars/__init__.py +++ b/lib/ansible/vars/__init__.py @@ -259,8 +259,6 @@ class VariableManager: except KeyError: pass - all_vars['vars'] = all_vars.copy() - if play: all_vars = combine_vars(all_vars, 
play.get_vars()) @@ -343,6 +341,8 @@ class VariableManager: all_vars['ansible_delegated_vars'] = self._get_delegated_vars(loader, play, task, all_vars) #VARIABLE_CACHE[cache_entry] = all_vars + if task or play: + all_vars['vars'] = all_vars.copy() debug("done with get_vars()") return all_vars From c60749c9222c8139042a0f4280d6622b209de550 Mon Sep 17 00:00:00 2001 From: Monty Taylor Date: Tue, 22 Dec 2015 09:14:12 -0600 Subject: [PATCH 0243/1113] Also convert ints to bool for type=bool --- lib/ansible/module_utils/basic.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py index 62b8cadfd61..8a135b300f1 100644 --- a/lib/ansible/module_utils/basic.py +++ b/lib/ansible/module_utils/basic.py @@ -1274,7 +1274,7 @@ class AnsibleModule(object): if isinstance(value, bool): return value - if isinstance(value, basestring): + if isinstance(value, basestring) or isinstance(value, int): return self.boolean(value) raise TypeError('%s cannot be converted to a bool' % type(value)) From b310d0ce76c05bb7a7a47aa7b7537b9adc916171 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Tue, 22 Dec 2015 07:22:44 -0800 Subject: [PATCH 0244/1113] Update the developing doc to modern method of specifying bool argspec values --- docsite/rst/developing_modules.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docsite/rst/developing_modules.rst b/docsite/rst/developing_modules.rst index fde4b5704b6..39bfd9e3d9c 100644 --- a/docsite/rst/developing_modules.rst +++ b/docsite/rst/developing_modules.rst @@ -247,7 +247,7 @@ And instantiating the module class like:: argument_spec = dict( state = dict(default='present', choices=['present', 'absent']), name = dict(required=True), - enabled = dict(required=True, choices=BOOLEANS), + enabled = dict(required=True, type='bool'), something = dict(aliases=['whatever']) ) ) @@ -335,7 +335,7 @@ and guidelines: * If you have a company module that returns facts specific to your installations, a good name for this module is `site_facts`. -* Modules accepting boolean status should generally accept 'yes', 'no', 'true', 'false', or anything else a user may likely throw at them. The AnsibleModule common code supports this with "choices=BOOLEANS" and a module.boolean(value) casting function. +* Modules accepting boolean status should generally accept 'yes', 'no', 'true', 'false', or anything else a user may likely throw at them. The AnsibleModule common code supports this with "type='bool'" and a module.boolean(value) casting function. * Include a minimum of dependencies if possible. If there are dependencies, document them at the top of the module file, and have the module raise JSON error messages when the import fails. From b33f72636a3b7f3a256185afde1aae3d9703235e Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Tue, 22 Dec 2015 07:25:50 -0800 Subject: [PATCH 0245/1113] Also remove the bool casting function info (transparent to module writer now) --- docsite/rst/developing_modules.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docsite/rst/developing_modules.rst b/docsite/rst/developing_modules.rst index 39bfd9e3d9c..141f81bd08b 100644 --- a/docsite/rst/developing_modules.rst +++ b/docsite/rst/developing_modules.rst @@ -335,7 +335,7 @@ and guidelines: * If you have a company module that returns facts specific to your installations, a good name for this module is `site_facts`. 
-* Modules accepting boolean status should generally accept 'yes', 'no', 'true', 'false', or anything else a user may likely throw at them. The AnsibleModule common code supports this with "type='bool'" and a module.boolean(value) casting function. +* Modules accepting boolean status should generally accept 'yes', 'no', 'true', 'false', or anything else a user may likely throw at them. The AnsibleModule common code supports this with "type='bool'". * Include a minimum of dependencies if possible. If there are dependencies, document them at the top of the module file, and have the module raise JSON error messages when the import fails. From c4da5840b5e38aea1740e68f7100256c93dfbb17 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Tue, 22 Dec 2015 08:22:02 -0800 Subject: [PATCH 0246/1113] Convert to bytes later so that make_become_command can jsut operate on text type. --- lib/ansible/plugins/action/__init__.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py index e88a55a15cc..765ba663164 100644 --- a/lib/ansible/plugins/action/__init__.py +++ b/lib/ansible/plugins/action/__init__.py @@ -487,8 +487,6 @@ class ActionBase(with_metaclass(ABCMeta, object)): verbatim, then this won't work. May have to use some sort of replacement strategy (python3 could use surrogateescape) ''' - # We may need to revisit this later. - cmd = to_bytes(cmd, errors='strict') if executable is not None: cmd = executable + ' -c ' + cmd @@ -505,7 +503,7 @@ class ActionBase(with_metaclass(ABCMeta, object)): cmd = self._play_context.make_become_cmd(cmd, executable=executable) display.debug("_low_level_execute_command(): executing: %s" % (cmd,)) - rc, stdout, stderr = self._connection.exec_command(cmd, in_data=in_data, sudoable=sudoable) + rc, stdout, stderr = self._connection.exec_command(to_bytes(cmd, errors='strict'), in_data=in_data, sudoable=sudoable) # stdout and stderr may be either a file-like or a bytes object. # Convert either one to a text type From b22d998d1d9acbda6f458ea99d7e5266d69e035c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Yannig=20Perr=C3=A9?= Date: Tue, 22 Dec 2015 16:30:29 +0100 Subject: [PATCH 0247/1113] Fix make tests-py3 on devel. Fix for https://github.com/ansible/ansible/issues/13638. 
--- test/units/plugins/action/test_action.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/test/units/plugins/action/test_action.py b/test/units/plugins/action/test_action.py index 0e47b6a5381..dcd04375959 100644 --- a/test/units/plugins/action/test_action.py +++ b/test/units/plugins/action/test_action.py @@ -42,14 +42,14 @@ class TestActionBase(unittest.TestCase): play_context.become = True play_context.become_user = play_context.remote_user = 'root' - play_context.make_become_cmd = Mock(return_value='CMD') + play_context.make_become_cmd = Mock(return_value=b'CMD') - action_base._low_level_execute_command('ECHO', sudoable=True) + action_base._low_level_execute_command(b'ECHO', sudoable=True) play_context.make_become_cmd.assert_not_called() play_context.remote_user = 'apo' - action_base._low_level_execute_command('ECHO', sudoable=True) - play_context.make_become_cmd.assert_called_once_with('ECHO', executable=None) + action_base._low_level_execute_command(b'ECHO', sudoable=True) + play_context.make_become_cmd.assert_called_once_with(b'ECHO', executable=None) play_context.make_become_cmd.reset_mock() @@ -57,7 +57,7 @@ class TestActionBase(unittest.TestCase): C.BECOME_ALLOW_SAME_USER = True try: play_context.remote_user = 'root' - action_base._low_level_execute_command('ECHO SAME', sudoable=True) - play_context.make_become_cmd.assert_called_once_with('ECHO SAME', executable=None) + action_base._low_level_execute_command(b'ECHO SAME', sudoable=True) + play_context.make_become_cmd.assert_called_once_with(b'ECHO SAME', executable=None) finally: C.BECOME_ALLOW_SAME_USER = become_allow_same_user From 010839aedc5d903b7ef2fac1b564642cd036e95e Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Tue, 22 Dec 2015 17:15:58 -0500 Subject: [PATCH 0248/1113] fix no_log disclosure when using aliases --- lib/ansible/module_utils/basic.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py index 4aee3b4169d..91ea874d859 100644 --- a/lib/ansible/module_utils/basic.py +++ b/lib/ansible/module_utils/basic.py @@ -516,6 +516,7 @@ class AnsibleModule(object): self._debug = False self.aliases = {} + self._legal_inputs = ['_ansible_check_mode', '_ansible_no_log', '_ansible_debug'] if add_file_common_args: for k, v in FILE_COMMON_ARGUMENTS.items(): @@ -524,6 +525,14 @@ class AnsibleModule(object): self.params = self._load_params() + # append to legal_inputs and then possibly check against them + try: + self.aliases = self._handle_aliases() + except Exception, e: + # use exceptions here cause its not safe to call vail json until no_log is processed + print('{"failed": true, "msg": "Module alias error: %s"}' % str(e)) + sys.exit(1) + # Save parameter values that should never be logged self.no_log_values = set() # Use the argspec to determine which args are no_log @@ -538,10 +547,6 @@ class AnsibleModule(object): # reset to LANG=C if it's an invalid/unavailable locale self._check_locale() - self._legal_inputs = ['_ansible_check_mode', '_ansible_no_log', '_ansible_debug'] - - # append to legal_inputs and then possibly check against them - self.aliases = self._handle_aliases() self._check_arguments(check_invalid_arguments) @@ -1064,6 +1069,7 @@ class AnsibleModule(object): self.fail_json(msg="An unknown error was encountered while attempting to validate the locale: %s" % e) def _handle_aliases(self): + # this uses exceptions as it happens before we can safely call fail_json aliases_results = 
{} #alias:canon for (k,v) in self.argument_spec.items(): self._legal_inputs.append(k) @@ -1072,11 +1078,11 @@ class AnsibleModule(object): required = v.get('required', False) if default is not None and required: # not alias specific but this is a good place to check this - self.fail_json(msg="internal error: required and default are mutually exclusive for %s" % k) + raise Exception("internal error: required and default are mutually exclusive for %s" % k) if aliases is None: continue if type(aliases) != list: - self.fail_json(msg='internal error: aliases must be a list') + raise Exception('internal error: aliases must be a list') for alias in aliases: self._legal_inputs.append(alias) aliases_results[alias] = k From 202b92179d247e508fe4190edc28614b136a5b89 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Tue, 22 Dec 2015 22:09:45 -0500 Subject: [PATCH 0249/1113] corrected role path search order the unfraking was matching roles in current dir as it always returns a full path, pushed to the bottom as match of last resort fixes #13645 --- lib/ansible/playbook/role/definition.py | 70 ++++++++++++------------- 1 file changed, 34 insertions(+), 36 deletions(-) diff --git a/lib/ansible/playbook/role/definition.py b/lib/ansible/playbook/role/definition.py index 7e8f47e9be8..0af49cec91c 100644 --- a/lib/ansible/playbook/role/definition.py +++ b/lib/ansible/playbook/role/definition.py @@ -135,46 +135,44 @@ class RoleDefinition(Base, Become, Conditional, Taggable): append it to the default role path ''' - role_path = unfrackpath(role_name) + # we always start the search for roles in the base directory of the playbook + role_search_paths = [ + os.path.join(self._loader.get_basedir(), u'roles'), + self._loader.get_basedir(), + ] + + # also search in the configured roles path + if C.DEFAULT_ROLES_PATH: + configured_paths = C.DEFAULT_ROLES_PATH.split(os.pathsep) + role_search_paths.extend(configured_paths) + + # finally, append the roles basedir, if it was set, so we can + # search relative to that directory for dependent roles + if self._role_basedir: + role_search_paths.append(self._role_basedir) + + # create a templar class to template the dependency names, in + # case they contain variables + if self._variable_manager is not None: + all_vars = self._variable_manager.get_vars(loader=self._loader, play=self._play) + else: + all_vars = dict() + + templar = Templar(loader=self._loader, variables=all_vars) + role_name = templar.template(role_name) + + # now iterate through the possible paths and return the first one we find + for path in role_search_paths: + path = templar.template(path) + role_path = unfrackpath(os.path.join(path, role_name)) + if self._loader.path_exists(role_path): + return (role_name, role_path) + # if not found elsewhere try to extract path from name + role_path = unfrackpath(role_name) if self._loader.path_exists(role_path): role_name = os.path.basename(role_name) return (role_name, role_path) - else: - # we always start the search for roles in the base directory of the playbook - role_search_paths = [ - os.path.join(self._loader.get_basedir(), u'roles'), - u'./roles', - self._loader.get_basedir(), - u'./' - ] - - # also search in the configured roles path - if C.DEFAULT_ROLES_PATH: - configured_paths = C.DEFAULT_ROLES_PATH.split(os.pathsep) - role_search_paths.extend(configured_paths) - - # finally, append the roles basedir, if it was set, so we can - # search relative to that directory for dependent roles - if self._role_basedir: - role_search_paths.append(self._role_basedir) - - # 
create a templar class to template the dependency names, in - # case they contain variables - if self._variable_manager is not None: - all_vars = self._variable_manager.get_vars(loader=self._loader, play=self._play) - else: - all_vars = dict() - - templar = Templar(loader=self._loader, variables=all_vars) - role_name = templar.template(role_name) - - # now iterate through the possible paths and return the first one we find - for path in role_search_paths: - path = templar.template(path) - role_path = unfrackpath(os.path.join(path, role_name)) - if self._loader.path_exists(role_path): - return (role_name, role_path) raise AnsibleError("the role '%s' was not found in %s" % (role_name, ":".join(role_search_paths)), obj=self._ds) From 957b376f9eb959f4f3627a622f7776a26442bf9c Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Tue, 22 Dec 2015 22:45:25 -0500 Subject: [PATCH 0250/1113] better module error handling * now module errors clearly state msg=MODULE FAILURE * module's stdout and stderr go into module_stdout and module_stderr keys which only appear during parsing failure * invocation module_args are deleted from results provided by action plugin as errors can keep us from overwriting and then disclosing info that was meant to be kept hidden due to no_log * fixed invocation module_args set by basic.py as it was creating different keys as the invocation in action plugin base. * results now merge --- lib/ansible/module_utils/basic.py | 4 ++-- lib/ansible/plugins/action/__init__.py | 5 +++-- lib/ansible/plugins/action/normal.py | 5 ++++- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py index 91ea874d859..0391035e883 100644 --- a/lib/ansible/module_utils/basic.py +++ b/lib/ansible/module_utils/basic.py @@ -1530,7 +1530,7 @@ class AnsibleModule(object): if not 'changed' in kwargs: kwargs['changed'] = False if 'invocation' not in kwargs: - kwargs['invocation'] = self.params + kwargs['invocation'] = {'module_args': self.params} kwargs = remove_values(kwargs, self.no_log_values) self.do_cleanup_files() print(self.jsonify(kwargs)) @@ -1542,7 +1542,7 @@ class AnsibleModule(object): assert 'msg' in kwargs, "implementation error -- msg to explain the error is required" kwargs['failed'] = True if 'invocation' not in kwargs: - kwargs['invocation'] = self.params + kwargs['invocation'] = {'module_args': self.params} kwargs = remove_values(kwargs, self.no_log_values) self.do_cleanup_files() print(self.jsonify(kwargs)) diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py index 765ba663164..5383f8afd43 100644 --- a/lib/ansible/plugins/action/__init__.py +++ b/lib/ansible/plugins/action/__init__.py @@ -460,9 +460,10 @@ class ActionBase(with_metaclass(ABCMeta, object)): if 'stderr' in res and res['stderr'].startswith(u'Traceback'): data['exception'] = res['stderr'] else: - data['msg'] = res.get('stdout', u'') + data['msg'] = "MODULE FAILURE" + data['module_stdout'] = res.get('stdout', u'') if 'stderr' in res: - data['msg'] += res['stderr'] + data['module_stderr'] = res['stderr'] # pre-split stdout into lines, if stdout is in the data and there # isn't already a stdout_lines value there diff --git a/lib/ansible/plugins/action/normal.py b/lib/ansible/plugins/action/normal.py index f9b55e1ff57..932ad8309c3 100644 --- a/lib/ansible/plugins/action/normal.py +++ b/lib/ansible/plugins/action/normal.py @@ -18,6 +18,7 @@ from __future__ import (absolute_import, division, print_function) __metaclass__ 
= type from ansible.plugins.action import ActionBase +from ansible.utils.vars import merge_hash class ActionModule(ActionBase): @@ -27,7 +28,9 @@ class ActionModule(ActionBase): task_vars = dict() results = super(ActionModule, self).run(tmp, task_vars) - results.update(self._execute_module(tmp=tmp, task_vars=task_vars)) + # remove as modules might hide due to nolog + del results['invocation']['module_args'] + results = merge_hash(results, self._execute_module(tmp=tmp, task_vars=task_vars)) # Remove special fields from the result, which can only be set # internally by the executor engine. We do this only here in # the 'normal' action, as other action plugins may set this. From 809c9af68cac56180b336d6ebe29d70b9d10ac14 Mon Sep 17 00:00:00 2001 From: Matt Roberts Date: Wed, 23 Dec 2015 08:18:46 +0000 Subject: [PATCH 0251/1113] Update playbooks_intro.rst If you follow the documentation through in order you shouldn't have read about modules yet. --- docsite/rst/playbooks_intro.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docsite/rst/playbooks_intro.rst b/docsite/rst/playbooks_intro.rst index 28c809f0132..55cd3359be6 100644 --- a/docsite/rst/playbooks_intro.rst +++ b/docsite/rst/playbooks_intro.rst @@ -41,7 +41,7 @@ Each playbook is composed of one or more 'plays' in a list. The goal of a play is to map a group of hosts to some well defined roles, represented by things ansible calls tasks. At a basic level, a task is nothing more than a call -to an ansible module, which you should have learned about in earlier chapters. +to an ansible module (see :doc:`Modules`). By composing a playbook of multiple 'plays', it is possible to orchestrate multi-machine deployments, running certain steps on all From 42b9a206ada579000a64cdcb7a0c82ecfd99c451 Mon Sep 17 00:00:00 2001 From: Michael Scherer Date: Wed, 23 Dec 2015 11:44:30 +0100 Subject: [PATCH 0252/1113] Fix last commit, make it python3 compatible (and py24) --- lib/ansible/module_utils/basic.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py index 91ea874d859..f9dc964e676 100644 --- a/lib/ansible/module_utils/basic.py +++ b/lib/ansible/module_utils/basic.py @@ -528,7 +528,8 @@ class AnsibleModule(object): # append to legal_inputs and then possibly check against them try: self.aliases = self._handle_aliases() - except Exception, e: + except Exception: + e = get_exception() # use exceptions here cause its not safe to call vail json until no_log is processed print('{"failed": true, "msg": "Module alias error: %s"}' % str(e)) sys.exit(1) From b201cf2ee13a9e4e1c5dc222043e3f1c84940044 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Wed, 23 Dec 2015 10:29:59 -0500 Subject: [PATCH 0253/1113] switched from pythonic None to generic null --- docsite/rst/developing_modules.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docsite/rst/developing_modules.rst b/docsite/rst/developing_modules.rst index 141f81bd08b..d3781b2f7fd 100644 --- a/docsite/rst/developing_modules.rst +++ b/docsite/rst/developing_modules.rst @@ -481,7 +481,7 @@ Module checklist * The shebang should always be #!/usr/bin/python, this allows ansible_python_interpreter to work * Documentation: Make sure it exists * `required` should always be present, be it true or false - * If `required` is false you need to document `default`, even if the default is 'None' (which is the default if no parameter is supplied). 
Make sure default parameter in docs matches default parameter in code. + * If `required` is false you need to document `default`, even if the default is 'null' (which is the default if no parameter is supplied). Make sure default parameter in docs matches default parameter in code. * `default` is not needed for `required: true` * Remove unnecessary doc like `aliases: []` or `choices: []` * The version is not a float number and value the current development version From d89d7951e6fb84cdb04cc35e0aa962d59fe6f553 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Wed, 23 Dec 2015 11:45:07 -0500 Subject: [PATCH 0254/1113] fixed tests to follow new invocation structure also added maxdiff setting to see issues clearly when they happen --- .../module_utils/basic/test_exit_json.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/test/units/module_utils/basic/test_exit_json.py b/test/units/module_utils/basic/test_exit_json.py index 931447f8ab6..27bbb0f9e56 100644 --- a/test/units/module_utils/basic/test_exit_json.py +++ b/test/units/module_utils/basic/test_exit_json.py @@ -31,8 +31,11 @@ from ansible.module_utils import basic from ansible.module_utils.basic import heuristic_log_sanitize from ansible.module_utils.basic import return_values, remove_values +empty_invocation = {u'module_args': {}} + @unittest.skipIf(sys.version_info[0] >= 3, "Python 3 is not supported on targets (yet)") class TestAnsibleModuleExitJson(unittest.TestCase): + def setUp(self): self.COMPLEX_ARGS = basic.MODULE_COMPLEX_ARGS basic.MODULE_COMPLEX_ARGS = '{}' @@ -56,7 +59,7 @@ class TestAnsibleModuleExitJson(unittest.TestCase): else: self.assertEquals(ctx.exception.code, 0) return_val = json.loads(self.fake_stream.getvalue()) - self.assertEquals(return_val, dict(changed=False, invocation={})) + self.assertEquals(return_val, dict(changed=False, invocation=empty_invocation)) def test_exit_json_args_exits(self): with self.assertRaises(SystemExit) as ctx: @@ -67,7 +70,7 @@ class TestAnsibleModuleExitJson(unittest.TestCase): else: self.assertEquals(ctx.exception.code, 0) return_val = json.loads(self.fake_stream.getvalue()) - self.assertEquals(return_val, dict(msg="message", changed=False, invocation={})) + self.assertEquals(return_val, dict(msg="message", changed=False, invocation=empty_invocation)) def test_fail_json_exits(self): with self.assertRaises(SystemExit) as ctx: @@ -78,13 +81,13 @@ class TestAnsibleModuleExitJson(unittest.TestCase): else: self.assertEquals(ctx.exception.code, 1) return_val = json.loads(self.fake_stream.getvalue()) - self.assertEquals(return_val, dict(msg="message", failed=True, invocation={})) + self.assertEquals(return_val, dict(msg="message", failed=True, invocation=empty_invocation)) def test_exit_json_proper_changed(self): with self.assertRaises(SystemExit) as ctx: self.module.exit_json(changed=True, msg='success') return_val = json.loads(self.fake_stream.getvalue()) - self.assertEquals(return_val, dict(changed=True, msg='success', invocation={})) + self.assertEquals(return_val, dict(changed=True, msg='success', invocation=empty_invocation)) @unittest.skipIf(sys.version_info[0] >= 3, "Python 3 is not supported on targets (yet)") class TestAnsibleModuleExitValuesRemoved(unittest.TestCase): @@ -95,21 +98,21 @@ class TestAnsibleModuleExitValuesRemoved(unittest.TestCase): not_secret='following the leader', msg='here'), dict(one=1, pwd=OMIT, url='https://username:password12345@foo.com/login/', not_secret='following the leader', changed=False, msg='here', - 
invocation=dict(password=OMIT, token=None, username='person')), + invocation=dict(module_args=dict(password=OMIT, token=None, username='person'))), ), (dict(username='person', password='password12345'), dict(one=1, pwd='$ecret k3y', url='https://username:password12345@foo.com/login/', not_secret='following the leader', msg='here'), dict(one=1, pwd='$ecret k3y', url='https://username:********@foo.com/login/', not_secret='following the leader', changed=False, msg='here', - invocation=dict(password=OMIT, token=None, username='person')), + invocation=dict(module_args=dict(password=OMIT, token=None, username='person'))), ), (dict(username='person', password='$ecret k3y'), dict(one=1, pwd='$ecret k3y', url='https://username:$ecret k3y@foo.com/login/', not_secret='following the leader', msg='here'), dict(one=1, pwd=OMIT, url='https://username:********@foo.com/login/', not_secret='following the leader', changed=False, msg='here', - invocation=dict(password=OMIT, token=None, username='person')), + invocation=dict(module_args=dict(password=OMIT, token=None, username='person'))), ), ) @@ -122,6 +125,7 @@ class TestAnsibleModuleExitValuesRemoved(unittest.TestCase): sys.stdout = self.old_stdout def test_exit_json_removes_values(self): + self.maxDiff = None for args, return_val, expected in self.dataset: sys.stdout = StringIO() basic.MODULE_COMPLEX_ARGS = json.dumps(args) @@ -137,6 +141,7 @@ class TestAnsibleModuleExitValuesRemoved(unittest.TestCase): self.assertEquals(json.loads(sys.stdout.getvalue()), expected) def test_fail_json_removes_values(self): + self.maxDiff = None for args, return_val, expected in self.dataset: expected = copy.deepcopy(expected) del expected['changed'] From 630a35adb0752dd9a4d74539b91b243bafb4c7d7 Mon Sep 17 00:00:00 2001 From: Matt Martz Date: Wed, 23 Dec 2015 14:57:24 -0600 Subject: [PATCH 0255/1113] Add ProxyCommand support to the paramiko connection plugin --- docsite/rst/intro_configuration.rst | 11 +++++++++++ lib/ansible/constants.py | 1 + .../plugins/connection/paramiko_ssh.py | 19 +++++++++++++++++++ 3 files changed, 31 insertions(+) diff --git a/docsite/rst/intro_configuration.rst b/docsite/rst/intro_configuration.rst index ccfb456ed93..7f21c2e1f61 100644 --- a/docsite/rst/intro_configuration.rst +++ b/docsite/rst/intro_configuration.rst @@ -739,6 +739,17 @@ instead. Setting it to False will improve performance and is recommended when h record_host_keys=True +.. _paramiko_proxy_command + +proxy_command +============= + +.. versionadded:: 2.1 + +Use an OpenSSH like ProxyCommand for proxying all Paramiko SSH connections through a bastion or jump host. Requires a minimum of Paramiko version 1.9.0. On Enterprise Linux 6 this is provided by ``python-paramiko1.10`` in the EPEL repository:: + + proxy_command = ssh -W "%h:%p" bastion + .. 
_openssh_settings: OpenSSH Specific Settings diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py index 5df9602246a..7d6a76a19e3 100644 --- a/lib/ansible/constants.py +++ b/lib/ansible/constants.py @@ -244,6 +244,7 @@ ANSIBLE_SSH_CONTROL_PATH = get_config(p, 'ssh_connection', 'control_path', ANSIBLE_SSH_PIPELINING = get_config(p, 'ssh_connection', 'pipelining', 'ANSIBLE_SSH_PIPELINING', False, boolean=True) ANSIBLE_SSH_RETRIES = get_config(p, 'ssh_connection', 'retries', 'ANSIBLE_SSH_RETRIES', 0, integer=True) PARAMIKO_RECORD_HOST_KEYS = get_config(p, 'paramiko_connection', 'record_host_keys', 'ANSIBLE_PARAMIKO_RECORD_HOST_KEYS', True, boolean=True) +PARAMIKO_PROXY_COMMAND = get_config(p, 'paramiko_connection', 'proxy_command', 'ANSIBLE_PARAMIKO_PROXY_COMMAND', None) # obsolete -- will be formally removed diff --git a/lib/ansible/plugins/connection/paramiko_ssh.py b/lib/ansible/plugins/connection/paramiko_ssh.py index ab9ce90db95..ea6ca3809d1 100644 --- a/lib/ansible/plugins/connection/paramiko_ssh.py +++ b/lib/ansible/plugins/connection/paramiko_ssh.py @@ -158,6 +158,24 @@ class Connection(ConnectionBase): pass # file was not found, but not required to function ssh.load_system_host_keys() + sock_kwarg = {} + if C.PARAMIKO_PROXY_COMMAND: + replacers = { + '%h': self._play_context.remote_addr, + '%p': port, + '%r': self._play_context.remote_user + } + proxy_command = C.PARAMIKO_PROXY_COMMAND + for find, replace in replacers.items(): + proxy_command = proxy_command.replace(find, str(replace)) + try: + sock_kwarg = {'sock': paramiko.ProxyCommand(proxy_command)} + display.vvv("CONFIGURE PROXY COMMAND FOR CONNECTION: %s" % proxy_command, host=self._play_context.remote_addr) + except AttributeError: + display.warning('Paramiko ProxyCommand support unavailable. ' + 'Please upgrade to Paramiko 1.9.0 or newer. ' + 'Not using configured ProxyCommand') + ssh.set_missing_host_key_policy(MyAddPolicy(self._new_stdin, self)) allow_agent = True @@ -179,6 +197,7 @@ class Connection(ConnectionBase): password=self._play_context.password, timeout=self._play_context.timeout, port=port, + **sock_kwarg ) except Exception as e: msg = str(e) From fd7e01696f659e1a147887087c87e2bad9742209 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Wed, 23 Dec 2015 17:16:21 -0500 Subject: [PATCH 0256/1113] updated submodule refs to pick up module changes --- lib/ansible/modules/core | 2 +- lib/ansible/modules/extras | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core index fcb3397df79..002028748f0 160000 --- a/lib/ansible/modules/core +++ b/lib/ansible/modules/core @@ -1 +1 @@ -Subproject commit fcb3397df7944ff15ea698b5717c06e8fc7d43ba +Subproject commit 002028748f080961ade801c30e194bfd4ba043ce diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras index c6829752d85..19e496c69c2 160000 --- a/lib/ansible/modules/extras +++ b/lib/ansible/modules/extras @@ -1 +1 @@ -Subproject commit c6829752d852398c255704cd5d7faa54342e143e +Subproject commit 19e496c69c22fc7ec1e3c8306b363a812b85d386 From deac4d00b22f9e0288f5e3c4633e07a7f937d47c Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Thu, 24 Dec 2015 11:32:40 -0800 Subject: [PATCH 0257/1113] bigip changes as requested by bcoca and abadger: * Fix to error if validate_cert is True and python doesn't support it. * Only globally disable certificate checking if really needed. Use bigip verify parameter if available instead. 
* Remove public disable certificate function to make it less likely people will attempt to reuse that --- lib/ansible/module_utils/f5.py | 36 ++++++++++++++++++++++++---------- 1 file changed, 26 insertions(+), 10 deletions(-) diff --git a/lib/ansible/module_utils/f5.py b/lib/ansible/module_utils/f5.py index e04e6b2f1ec..ba336377e7d 100644 --- a/lib/ansible/module_utils/f5.py +++ b/lib/ansible/module_utils/f5.py @@ -51,19 +51,35 @@ def f5_argument_spec(): def f5_parse_arguments(module): if not bigsuds_found: module.fail_json(msg="the python bigsuds module is required") - if not module.params['validate_certs']: - disable_ssl_cert_validation() + + if module.params['validate_certs']: + import ssl + if not hasattr(ssl, 'SSLContext'): + module.fail_json(msg='bigsuds does not support verifying certificates with python < 2.7.9. Either update python or set validate_certs=False on the task') + return (module.params['server'],module.params['user'],module.params['password'],module.params['state'],module.params['partition'],module.params['validate_certs']) -def bigip_api(bigip, user, password): - api = bigsuds.BIGIP(hostname=bigip, username=user, password=password) - return api +def bigip_api(bigip, user, password, validate_certs): + try: + # bigsuds >= 1.0.3 + api = bigsuds.BIGIP(hostname=bigip, username=user, password=password, verify=validate_certs) + except TypeError: + # bigsuds < 1.0.3, no verify param + if validate_certs: + # Note: verified we have SSLContext when we parsed params + api = bigsuds.BIGIP(hostname=bigip, username=user, password=password) + else: + import ssl + if hasattr(ssl, 'SSLContext'): + # Really, you should never do this. It disables certificate + # verification *globally*. But since older bigip libraries + # don't give us a way to toggle verification we need to + # disable it at the global level. + # From https://www.python.org/dev/peps/pep-0476/#id29 + ssl._create_default_https_context = ssl._create_unverified_context + api = bigsuds.BIGIP(hostname=bigip, username=user, password=password) -def disable_ssl_cert_validation(): - # You probably only want to do this for testing and never in production. 
- # From https://www.python.org/dev/peps/pep-0476/#id29 - import ssl - ssl._create_default_https_context = ssl._create_unverified_context + return api # Fully Qualified name (with the partition) def fq_name(partition,name): From a8e0763d1ec003e5f83c1d848578f7a0a02c9df4 Mon Sep 17 00:00:00 2001 From: Matt Martz Date: Thu, 24 Dec 2015 15:00:53 -0600 Subject: [PATCH 0258/1113] Move _split_args from ssh.py to ConnectionBase so we can use it in other connection plugins --- lib/ansible/plugins/connection/__init__.py | 11 +++++++++++ lib/ansible/plugins/connection/ssh.py | 14 ++------------ 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/lib/ansible/plugins/connection/__init__.py b/lib/ansible/plugins/connection/__init__.py index 06616bac4ca..4b6c17dc32c 100644 --- a/lib/ansible/plugins/connection/__init__.py +++ b/lib/ansible/plugins/connection/__init__.py @@ -23,6 +23,7 @@ __metaclass__ = type import fcntl import gettext import os +import shlex from abc import ABCMeta, abstractmethod, abstractproperty from functools import wraps @@ -31,6 +32,7 @@ from ansible.compat.six import with_metaclass from ansible import constants as C from ansible.errors import AnsibleError from ansible.plugins import shell_loader +from ansible.utils.unicode import to_bytes, to_unicode try: from __main__ import display @@ -112,6 +114,15 @@ class ConnectionBase(with_metaclass(ABCMeta, object)): ''' pass + @staticmethod + def _split_ssh_args(argstring): + """ + Takes a string like '-o Foo=1 -o Bar="foo bar"' and returns a + list ['-o', 'Foo=1', '-o', 'Bar=foo bar'] that can be added to + the argument list. The list will not contain any empty elements. + """ + return [to_unicode(x.strip()) for x in shlex.split(to_bytes(argstring)) if x.strip()] + @abstractproperty def transport(self): """String used to identify this Connection class from other classes""" diff --git a/lib/ansible/plugins/connection/ssh.py b/lib/ansible/plugins/connection/ssh.py index a2abcf20aee..3da701aa8e4 100644 --- a/lib/ansible/plugins/connection/ssh.py +++ b/lib/ansible/plugins/connection/ssh.py @@ -24,7 +24,6 @@ import os import pipes import pty import select -import shlex import subprocess import time @@ -100,15 +99,6 @@ class Connection(ConnectionBase): return controlpersist, controlpath - @staticmethod - def _split_args(argstring): - """ - Takes a string like '-o Foo=1 -o Bar="foo bar"' and returns a - list ['-o', 'Foo=1', '-o', 'Bar=foo bar'] that can be added to - the argument list. The list will not contain any empty elements. - """ - return [to_unicode(x.strip()) for x in shlex.split(to_bytes(argstring)) if x.strip()] - def _add_args(self, explanation, args): """ Adds the given args to self._command and displays a caller-supplied @@ -157,7 +147,7 @@ class Connection(ConnectionBase): # Next, we add [ssh_connection]ssh_args from ansible.cfg. 
if self._play_context.ssh_args: - args = self._split_args(self._play_context.ssh_args) + args = self._split_ssh_args(self._play_context.ssh_args) self._add_args("ansible.cfg set ssh_args", args) # Now we add various arguments controlled by configuration file settings @@ -210,7 +200,7 @@ class Connection(ConnectionBase): for opt in ['ssh_common_args', binary + '_extra_args']: attr = getattr(self._play_context, opt, None) if attr is not None: - args = self._split_args(attr) + args = self._split_ssh_args(attr) self._add_args("PlayContext set %s" % opt, args) # Check if ControlPersist is enabled and add a ControlPath if one hasn't From 0296209bc139d00d696a9d0722bee01f3bf99c2d Mon Sep 17 00:00:00 2001 From: Matt Martz Date: Thu, 24 Dec 2015 15:01:41 -0600 Subject: [PATCH 0259/1113] Parse ansible_ssh_common_args looking for ProxyCommand, for use in paramiko --- .../plugins/connection/paramiko_ssh.py | 28 +++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) diff --git a/lib/ansible/plugins/connection/paramiko_ssh.py b/lib/ansible/plugins/connection/paramiko_ssh.py index ea6ca3809d1..47028a60a5a 100644 --- a/lib/ansible/plugins/connection/paramiko_ssh.py +++ b/lib/ansible/plugins/connection/paramiko_ssh.py @@ -32,6 +32,7 @@ import tempfile import traceback import fcntl import sys +import re from termios import tcflush, TCIFLUSH from binascii import hexlify @@ -55,6 +56,9 @@ The %s key fingerprint is %s. Are you sure you want to continue connecting (yes/no)? """ +# SSH Options Regex +SETTINGS_REGEX = re.compile(r'(\w+)(?:\s*=\s*|\s+)(.+)') + # prevent paramiko warning noise -- see http://stackoverflow.com/questions/3920502/ HAVE_PARAMIKO=False with warnings.catch_warnings(): @@ -158,14 +162,34 @@ class Connection(ConnectionBase): pass # file was not found, but not required to function ssh.load_system_host_keys() + proxy_command = None + # Parse ansible_ssh_common_args, specifically looking for ProxyCommand + ssh_common_args = getattr(self._play_context, 'ssh_common_args', None) + if ssh_common_args is not None: + args = self._split_ssh_args(ssh_common_args) + for i, arg in enumerate(args): + if arg.lower() == 'proxycommand': + # _split_ssh_args split ProxyCommand from the command itself + proxy_command = args[i + 1] + else: + # ProxyCommand and the command itself are a single string + match = SETTINGS_REGEX.match(arg) + if match: + if match.group(1).lower() == 'proxycommand': + proxy_command = match.group(2) + + if proxy_command: + break + + proxy_command = proxy_command or C.PARAMIKO_PROXY_COMMAND + sock_kwarg = {} - if C.PARAMIKO_PROXY_COMMAND: + if proxy_command: replacers = { '%h': self._play_context.remote_addr, '%p': port, '%r': self._play_context.remote_user } - proxy_command = C.PARAMIKO_PROXY_COMMAND for find, replace in replacers.items(): proxy_command = proxy_command.replace(find, str(replace)) try: From 2587edb4f31390f51678bfaa2764146a16ed2841 Mon Sep 17 00:00:00 2001 From: Matt Martz Date: Thu, 24 Dec 2015 15:10:42 -0600 Subject: [PATCH 0260/1113] Move proxycommand parsing into _parse_proxy_command --- .../plugins/connection/paramiko_ssh.py | 47 ++++++++++--------- 1 file changed, 26 insertions(+), 21 deletions(-) diff --git a/lib/ansible/plugins/connection/paramiko_ssh.py b/lib/ansible/plugins/connection/paramiko_ssh.py index 47028a60a5a..21dfe0c7bc3 100644 --- a/lib/ansible/plugins/connection/paramiko_ssh.py +++ b/lib/ansible/plugins/connection/paramiko_ssh.py @@ -141,27 +141,7 @@ class Connection(ConnectionBase): self.ssh = SSH_CONNECTION_CACHE[cache_key] = 
self._connect_uncached() return self - def _connect_uncached(self): - ''' activates the connection object ''' - - if not HAVE_PARAMIKO: - raise AnsibleError("paramiko is not installed") - - port = self._play_context.port or 22 - display.vvv("ESTABLISH CONNECTION FOR USER: %s on PORT %s TO %s" % (self._play_context.remote_user, port, self._play_context.remote_addr), host=self._play_context.remote_addr) - - ssh = paramiko.SSHClient() - - self.keyfile = os.path.expanduser("~/.ssh/known_hosts") - - if C.HOST_KEY_CHECKING: - try: - #TODO: check if we need to look at several possible locations, possible for loop - ssh.load_system_host_keys("/etc/ssh/ssh_known_hosts") - except IOError: - pass # file was not found, but not required to function - ssh.load_system_host_keys() - + def _parse_proxy_command(self, port=22): proxy_command = None # Parse ansible_ssh_common_args, specifically looking for ProxyCommand ssh_common_args = getattr(self._play_context, 'ssh_common_args', None) @@ -200,6 +180,31 @@ class Connection(ConnectionBase): 'Please upgrade to Paramiko 1.9.0 or newer. ' 'Not using configured ProxyCommand') + return sock_kwarg + + def _connect_uncached(self): + ''' activates the connection object ''' + + if not HAVE_PARAMIKO: + raise AnsibleError("paramiko is not installed") + + port = self._play_context.port or 22 + display.vvv("ESTABLISH CONNECTION FOR USER: %s on PORT %s TO %s" % (self._play_context.remote_user, port, self._play_context.remote_addr), host=self._play_context.remote_addr) + + ssh = paramiko.SSHClient() + + self.keyfile = os.path.expanduser("~/.ssh/known_hosts") + + if C.HOST_KEY_CHECKING: + try: + #TODO: check if we need to look at several possible locations, possible for loop + ssh.load_system_host_keys("/etc/ssh/ssh_known_hosts") + except IOError: + pass # file was not found, but not required to function + ssh.load_system_host_keys() + + sock_kwarg = self._parse_proxy_command(port) + ssh.set_missing_host_key_policy(MyAddPolicy(self._new_stdin, self)) allow_agent = True From cd9e18d0e52c1915132614e6e2946a26968e3091 Mon Sep 17 00:00:00 2001 From: Stephen Medina Date: Fri, 25 Dec 2015 08:56:08 -0800 Subject: [PATCH 0261/1113] clarify idempotence explanation Small typo; wasn't sure what to replace it with. --- docsite/rst/intro_adhoc.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docsite/rst/intro_adhoc.rst b/docsite/rst/intro_adhoc.rst index 9e104d5836f..61ba33523a6 100644 --- a/docsite/rst/intro_adhoc.rst +++ b/docsite/rst/intro_adhoc.rst @@ -112,7 +112,7 @@ For example, using double rather than single quotes in the above example would evaluate the variable on the box you were on. So far we've been demoing simple command execution, but most Ansible modules usually do not work like -simple scripts. They make the remote system look like you state, and run the commands necessary to +simple scripts. They make the remote system look like a state, and run the commands necessary to get it there. This is commonly referred to as 'idempotence', and is a core design goal of Ansible. However, we also recognize that running arbitrary commands is equally important, so Ansible easily supports both. 
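The idempotence wording above is easier to see as code: a module first inspects the current state, compares it with the requested state, and only acts (and only reports a change) when the two differ. A rough sketch of that pattern, loosely modeled on the ``set_*_if_different`` helpers in ``module_utils/basic.py`` rather than taken from any real module::

    import os
    import stat

    def set_mode_if_different(path, wanted_mode, check_mode=False):
        # desired state vs. current state: running this twice changes nothing
        current = stat.S_IMODE(os.lstat(path).st_mode)
        if current == wanted_mode:
            return False                 # already converged, no change to report
        if not check_mode:
            os.chmod(path, wanted_mode)  # converge the system toward the declared state
        return True                      # a change was (or would be) made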
From d70a97b562da1b06d21a86fd1c7619bfa2b6a2e6 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Fri, 25 Dec 2015 12:17:22 -0800 Subject: [PATCH 0262/1113] Update submodule refs --- lib/ansible/modules/extras | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras index 19e496c69c2..f6a7b6dd1f7 160000 --- a/lib/ansible/modules/extras +++ b/lib/ansible/modules/extras @@ -1 +1 @@ -Subproject commit 19e496c69c22fc7ec1e3c8306b363a812b85d386 +Subproject commit f6a7b6dd1f7be93ba640c50bf26adeeabb5af46f From 0b92abaf67de53349bb4d2733f49750d9a4d8277 Mon Sep 17 00:00:00 2001 From: Etherdaemon Date: Sun, 27 Dec 2015 21:31:59 +1000 Subject: [PATCH 0263/1113] Proposed fix for ansible/ansible-modules-extras#1348 due to datetime.datetime type not being matched --- lib/ansible/module_utils/basic.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py index 6fd382aa490..89d595a0bf3 100644 --- a/lib/ansible/module_utils/basic.py +++ b/lib/ansible/module_utils/basic.py @@ -65,6 +65,7 @@ import grp import pwd import platform import errno +import datetime from itertools import repeat, chain try: @@ -423,10 +424,13 @@ def remove_values(value, no_log_strings): for omit_me in no_log_strings: if omit_me in stringy_value: return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER' + elif isinstance(value, datetime.datetime): + value = value.isoformat() else: raise TypeError('Value of unknown type: %s, %s' % (type(value), value)) return value + def heuristic_log_sanitize(data, no_log_values=None): ''' Remove strings that look like passwords from log messages ''' # Currently filters: From c489b271d152820ab11b73d11877f8805318cd7a Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Sun, 27 Dec 2015 14:17:20 -0500 Subject: [PATCH 0264/1113] updated release cycle to 4 months instead of 2 --- docsite/rst/intro_installation.rst | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/docsite/rst/intro_installation.rst b/docsite/rst/intro_installation.rst index e986ffd70f6..a5ed83a3027 100644 --- a/docsite/rst/intro_installation.rst +++ b/docsite/rst/intro_installation.rst @@ -27,12 +27,11 @@ What Version To Pick? ````````````````````` Because it runs so easily from source and does not require any installation of software on remote -machines, many users will actually track the development version. +machines, many users will actually track the development version. -Ansible's release cycles are usually about two months long. Due to this -short release cycle, minor bugs will generally be fixed in the next release versus maintaining -backports on the stable branch. Major bugs will still have maintenance releases when needed, though -these are infrequent. +Ansible's release cycles are usually about four months long. Due to this short release cycle, +minor bugs will generally be fixed in the next release versus maintaining backports on the stable branch. +Major bugs will still have maintenance releases when needed, though these are infrequent. If you are wishing to run the latest released version of Ansible and you are running Red Hat Enterprise Linux (TM), CentOS, Fedora, Debian, or Ubuntu, we recommend using the OS package manager. 
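Returning to the ``remove_values()`` change in PATCH 0263 above: before it, a ``datetime.datetime`` value that reached the final ``else`` branch raised ``TypeError('Value of unknown type: ...')``; with it, the value is converted to a plain string first. The conversion it relies on is just::

    import datetime

    value = datetime.datetime(2015, 12, 27, 21, 31, 59)
    print(value.isoformat())   # '2015-12-27T21:31:59' -- a string the existing checks can handle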
From 20005660313b5abc4188704fc3a37a4c25f83e62 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Mon, 28 Dec 2015 10:24:28 -0500 Subject: [PATCH 0265/1113] minor fix to become docs --- docsite/rst/become.rst | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docsite/rst/become.rst b/docsite/rst/become.rst index 64628515c6c..7597643f883 100644 --- a/docsite/rst/become.rst +++ b/docsite/rst/become.rst @@ -1,5 +1,5 @@ -Ansible Privilege Escalation -++++++++++++++++++++++++++++ +Become (Privilege Escalation) ++++++++++++++++++++++++++++++ Ansible can use existing privilege escalation systems to allow a user to execute tasks as another. @@ -7,17 +7,17 @@ Ansible can use existing privilege escalation systems to allow a user to execute Become `````` -Before 1.9 Ansible mostly allowed the use of sudo and a limited use of su to allow a login/remote user to become a different user -and execute tasks, create resources with the 2nd user's permissions. As of 1.9 'become' supersedes the old sudo/su, while still -being backwards compatible. This new system also makes it easier to add other privilege escalation tools like pbrun (Powerbroker), -pfexec and others. +Before 1.9 Ansible mostly allowed the use of `sudo` and a limited use of `su` to allow a login/remote user to become a different user +and execute tasks, create resources with the 2nd user's permissions. As of 1.9 `become` supersedes the old sudo/su, while still +being backwards compatible. This new system also makes it easier to add other privilege escalation tools like `pbrun` (Powerbroker), +`pfexec` and others. New directives -------------- become - equivalent to adding 'sudo:' or 'su:' to a play or task, set to 'true'/'yes' to activate privilege escalation + equivalent to adding `sudo:` or `su:` to a play or task, set to 'true'/'yes' to activate privilege escalation become_user equivalent to adding 'sudo_user:' or 'su_user:' to a play or task, set to user with desired privileges From 56454d6a9135fb18e5d0545b9162b940cbcb8a78 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Mon, 28 Dec 2015 12:25:27 -0500 Subject: [PATCH 0266/1113] added newer vars to 'reset_vars' these vars pass back info to the task about the connection moved to their own block at start at file for readability and added the newer standard vars --- lib/ansible/playbook/play_context.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/lib/ansible/playbook/play_context.py b/lib/ansible/playbook/play_context.py index 81223500adf..6b19f4c1723 100644 --- a/lib/ansible/playbook/play_context.py +++ b/lib/ansible/playbook/play_context.py @@ -125,6 +125,18 @@ TASK_ATTRIBUTE_OVERRIDES = ( 'remote_user', ) +RESET_VARS = ( + 'ansible_connection', + 'ansible_ssh_host', + 'ansible_ssh_pass', + 'ansible_ssh_port', + 'ansible_ssh_user', + 'ansible_ssh_private_key_file', + 'ansible_ssh_pipelining', + 'ansible_user', + 'ansible_host', + 'ansible_port', +) class PlayContext(Base): @@ -505,7 +517,8 @@ class PlayContext(Base): # TODO: should we be setting the more generic values here rather than # the more specific _ssh_ ones? 
- for special_var in ['ansible_connection', 'ansible_ssh_host', 'ansible_ssh_pass', 'ansible_ssh_port', 'ansible_ssh_user', 'ansible_ssh_private_key_file', 'ansible_ssh_pipelining']: + for special_var in RESET_VARS: + if special_var not in variables: for prop, varnames in MAGIC_VARIABLE_MAPPING.items(): if special_var in varnames: From 2d11cfab92f9d26448461b4bc81f466d1910a15e Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Tue, 29 Dec 2015 11:40:18 -0500 Subject: [PATCH 0267/1113] Squashed commit of the following: commit 24efa310b58c431b4d888a6315d1285da918f670 Author: James Cammarata Date: Tue Dec 29 11:23:52 2015 -0500 Adding an additional test for copy exclusion Adds a negative test for the situation when an exclusion doesn't exist in the target to be copied. commit 643ba054877cf042177d65e6e2958178bdd2fe88 Merge: e6ee59f 66a8f7e Author: James Cammarata Date: Tue Dec 29 10:59:18 2015 -0500 Merge branch 'speedup' of https://github.com/chrismeyersfsu/ansible into chrismeyersfsu-speedup commit 66a8f7e873ca90f7848e47b04d9b62aed23a45df Author: Chris Meyers Date: Mon Dec 28 09:47:00 2015 -0500 better api and tests added * _copy_results = deepcopy for better performance * _copy_results_exclude to deepcopy but exclude certain fields. Pop fields that do not need to be deep copied. Re-assign popped fields after deep copy so we don't modify the original, to be copied, object. * _copy_results_exclude unit tests commit 93490960ff4e75f38a7cc6f6d49f10f949f1a7da Author: Chris Meyers Date: Fri Dec 25 23:17:26 2015 -0600 remove uneeded deepcopy fields --- lib/ansible/plugins/callback/__init__.py | 19 ++++- test/units/plugins/callback/test_callback.py | 82 ++++++++++++++++++++ 2 files changed, 97 insertions(+), 4 deletions(-) create mode 100644 test/units/plugins/callback/test_callback.py diff --git a/lib/ansible/plugins/callback/__init__.py b/lib/ansible/plugins/callback/__init__.py index 7371fe0a51e..cc2a9ad0e75 100644 --- a/lib/ansible/plugins/callback/__init__.py +++ b/lib/ansible/plugins/callback/__init__.py @@ -59,9 +59,20 @@ class CallbackBase: version = getattr(self, 'CALLBACK_VERSION', '1.0') self._display.vvvv('Loaded callback %s of type %s, v%s' % (name, ctype, version)) - def _copy_result(self, result): - ''' helper for callbacks, so they don't all have to include deepcopy ''' - return deepcopy(result) + ''' helper for callbacks, so they don't all have to include deepcopy ''' + _copy_result = deepcopy + + def _copy_result_exclude(self, result, exclude): + values = [] + for e in exclude: + values.append(getattr(result, e)) + setattr(result, e, None) + + result_copy = deepcopy(result) + for i,e in enumerate(exclude): + setattr(result, e, values[i]) + + return result_copy def _dump_results(self, result, indent=None, sort_keys=True, keep_invocation=False): if result.get('_ansible_no_log', False): @@ -130,7 +141,7 @@ class CallbackBase: def _process_items(self, result): for res in result._result['results']: - newres = self._copy_result(result) + newres = self._copy_result_exclude(result, ['_result']) res['item'] = self._get_item(res) newres._result = res if 'failed' in res and res['failed']: diff --git a/test/units/plugins/callback/test_callback.py b/test/units/plugins/callback/test_callback.py new file mode 100644 index 00000000000..54964ac9df2 --- /dev/null +++ b/test/units/plugins/callback/test_callback.py @@ -0,0 +1,82 @@ +# (c) 2012-2014, Chris Meyers +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU 
General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see . + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +from six import PY3 +from copy import deepcopy + +from ansible.compat.tests import unittest +from ansible.compat.tests.mock import patch, mock_open + +from ansible.plugins.callback import CallbackBase +import ansible.plugins.callback as callish + +class TestCopyResultExclude(unittest.TestCase): + def setUp(self): + class DummyClass(): + def __init__(self): + self.bar = [ 1, 2, 3 ] + self.a = { + "b": 2, + "c": 3, + } + self.b = { + "c": 3, + "d": 4, + } + self.foo = DummyClass() + self.cb = CallbackBase() + + def tearDown(self): + pass + + def test_copy_logic(self): + res = self.cb._copy_result_exclude(self.foo, ()) + self.assertEqual(self.foo.bar, res.bar) + + def test_copy_deep(self): + res = self.cb._copy_result_exclude(self.foo, ()) + self.assertNotEqual(id(self.foo.bar), id(res.bar)) + + def test_no_exclude(self): + res = self.cb._copy_result_exclude(self.foo, ()) + self.assertEqual(self.foo.bar, res.bar) + self.assertEqual(self.foo.a, res.a) + self.assertEqual(self.foo.b, res.b) + + def test_exclude(self): + res = self.cb._copy_result_exclude(self.foo, ['bar', 'b']) + self.assertIsNone(res.bar) + self.assertIsNone(res.b) + self.assertEqual(self.foo.a, res.a) + + def test_result_unmodified(self): + bar_id = id(self.foo.bar) + a_id = id(self.foo.a) + res = self.cb._copy_result_exclude(self.foo, ['bar', 'a']) + + self.assertEqual(self.foo.bar, [ 1, 2, 3 ]) + self.assertEqual(bar_id, id(self.foo.bar)) + + self.assertEqual(self.foo.a, dict(b=2, c=3)) + self.assertEqual(a_id, id(self.foo.a)) + + self.assertRaises(AttributeError, self.cb._copy_result_exclude, self.foo, ['a', 'c', 'bar']) + From d3deb24ead59d5fdbecad3c946848537f95772ad Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Tue, 29 Dec 2015 15:41:00 -0500 Subject: [PATCH 0268/1113] output color is now configurable --- examples/ansible.cfg | 11 ++++++ lib/ansible/cli/galaxy.py | 25 +++++++------- lib/ansible/constants.py | 11 ++++++ lib/ansible/executor/task_executor.py | 2 +- lib/ansible/playbook/__init__.py | 3 +- lib/ansible/plugins/callback/default.py | 46 ++++++++++++------------- lib/ansible/plugins/callback/minimal.py | 17 +++++---- lib/ansible/plugins/callback/oneline.py | 14 ++++---- lib/ansible/utils/color.py | 3 +- lib/ansible/utils/display.py | 14 ++++---- 10 files changed, 86 insertions(+), 60 deletions(-) diff --git a/examples/ansible.cfg b/examples/ansible.cfg index ec3ddf20641..b357738b39c 100644 --- a/examples/ansible.cfg +++ b/examples/ansible.cfg @@ -262,3 +262,14 @@ # the default behaviour that copies the existing context or uses the user default # needs to be changed to use the file system dependent context. 
#special_context_filesystems=nfs,vboxsf,fuse,ramfs + +[colors] +#verbose = blue +#warn = bright purple +#error = red +#debug = dark gray +#deprecate = purple +#skip = cyan +#unreachable = red +#ok = green +#changed = yellow diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py index 34afa03c9f7..476a7d0f897 100644 --- a/lib/ansible/cli/galaxy.py +++ b/lib/ansible/cli/galaxy.py @@ -514,7 +514,7 @@ class GalaxyCLI(CLI): tags=self.options.tags, author=self.options.author, page_size=page_size) if response['count'] == 0: - display.display("No roles match your search.", color="yellow") + display.display("No roles match your search.", color=C.COLOR_ERROR) return True data = '' @@ -570,10 +570,10 @@ class GalaxyCLI(CLI): colors = { 'INFO': 'normal', - 'WARNING': 'yellow', - 'ERROR': 'red', - 'SUCCESS': 'green', - 'FAILED': 'red' + 'WARNING': C.COLOR_WARN, + 'ERROR': C.COLOR_ERROR, + 'SUCCESS': C.COLOR_OK, + 'FAILED': C.COLOR_ERROR, } if len(self.args) < 2: @@ -592,11 +592,10 @@ class GalaxyCLI(CLI): # found multiple roles associated with github_user/github_repo display.display("WARNING: More than one Galaxy role associated with Github repo %s/%s." % (github_user,github_repo), color='yellow') - display.display("The following Galaxy roles are being updated:" + u'\n', color='yellow') + display.display("The following Galaxy roles are being updated:" + u'\n', color=C.COLOR_CHANGED) for t in task: - display.display('%s.%s' % (t['summary_fields']['role']['namespace'],t['summary_fields']['role']['name']), color='yellow') - display.display(u'\n' + "To properly namespace this role, remove each of the above and re-import %s/%s from scratch" % (github_user,github_repo), - color='yellow') + display.display('%s.%s' % (t['summary_fields']['role']['namespace'],t['summary_fields']['role']['name']), color=C.COLOR_CHANGED) + display.display(u'\n' + "To properly namespace this role, remove each of the above and re-import %s/%s from scratch" % (github_user,github_repo), color=C.COLOR_CHANGED) return 0 # found a single role as expected display.display("Successfully submitted import request %d" % task[0]['id']) @@ -633,17 +632,17 @@ class GalaxyCLI(CLI): # None found display.display("No integrations found.") return 0 - display.display(u'\n' + "ID Source Repo", color="green") - display.display("---------- ---------- ----------", color="green") + display.display(u'\n' + "ID Source Repo", color=C.COLOR_OK) + display.display("---------- ---------- ----------", color=C.COLOR_OK) for secret in secrets: display.display("%-10s %-10s %s/%s" % (secret['id'], secret['source'], secret['github_user'], - secret['github_repo']),color="green") + secret['github_repo']),color=C.COLOR_OK) return 0 if self.options.remove_id: # Remove a secret self.api.remove_secret(self.options.remove_id) - display.display("Secret removed. Integrations using this secret will not longer work.", color="green") + display.display("Secret removed. 
Integrations using this secret will not longer work.", color=C.COLOR_OK) return 0 if len(self.args) < 4: diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py index 5df9602246a..9b84825d6bc 100644 --- a/lib/ansible/constants.py +++ b/lib/ansible/constants.py @@ -268,6 +268,17 @@ GALAXY_SCMS = get_config(p, 'galaxy', 'scms', 'ANSIBLE_GALAXY DEFAULT_PASSWORD_CHARS = ascii_letters + digits + ".,:-_" STRING_TYPE_FILTERS = get_config(p, 'jinja2', 'dont_type_filters', 'ANSIBLE_STRING_TYPE_FILTERS', ['string', 'to_json', 'to_nice_json', 'to_yaml', 'ppretty', 'json'], islist=True ) +# colors +COLOR_VERBOSE = get_config(p, 'colors', 'verbose', 'ANSIBLE_COLOR_VERBOSE', 'blue') +COLOR_WARN = get_config(p, 'colors', 'warn', 'ANSIBLE_COLOR_WARN', 'bright purple') +COLOR_ERROR = get_config(p, 'colors', 'error', 'ANSIBLE_COLOR_ERROR', 'red') +COLOR_DEBUG = get_config(p, 'colors', 'debug', 'ANSIBLE_COLOR_DEBUG', 'dark gray') +COLOR_DEPRECATE = get_config(p, 'colors', 'deprecate', 'ANSIBLE_COLOR_DEPRECATE', 'purple') +COLOR_SKIP = get_config(p, 'colors', 'skip', 'ANSIBLE_COLOR_SKIP', 'cyan') +COLOR_UNREACHABLE = get_config(p, 'colors', 'unreachable', 'ANSIBLE_COLOR_UNREACHABLE', 'bright red') +COLOR_OK = get_config(p, 'colors', 'ok', 'ANSIBLE_COLOR_OK', 'green') +COLOR_CHANGED = get_config(p, 'colors', 'ok', 'ANSIBLE_COLOR_CHANGED', 'yellow') + # non-configurable things MODULE_REQUIRE_ARGS = ['command', 'shell', 'raw', 'script'] MODULE_NO_JSON = ['command', 'shell', 'raw'] diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py index c8b6fa179bc..4a2d30a2cd2 100644 --- a/lib/ansible/executor/task_executor.py +++ b/lib/ansible/executor/task_executor.py @@ -393,7 +393,7 @@ class TaskExecutor: result = None for attempt in range(retries): if attempt > 0: - display.display("FAILED - RETRYING: %s (%d retries left). Result was: %s" % (self._task, retries-attempt, result), color="dark gray") + display.display("FAILED - RETRYING: %s (%d retries left). Result was: %s" % (self._task, retries-attempt, result), color=C.COLOR_DEBUG) result['attempts'] = attempt + 1 display.debug("running the handler") diff --git a/lib/ansible/playbook/__init__.py b/lib/ansible/playbook/__init__.py index 0ae443f8436..947224d61fc 100644 --- a/lib/ansible/playbook/__init__.py +++ b/lib/ansible/playbook/__init__.py @@ -25,6 +25,7 @@ from ansible.errors import AnsibleParserError from ansible.playbook.play import Play from ansible.playbook.playbook_include import PlaybookInclude from ansible.plugins import get_all_plugin_loaders +from ansible import constants as C try: from __main__ import display @@ -87,7 +88,7 @@ class Playbook: if pb is not None: self._entries.extend(pb._entries) else: - display.display("skipping playbook include '%s' due to conditional test failure" % entry.get('include', entry), color='cyan') + display.display("skipping playbook include '%s' due to conditional test failure" % entry.get('include', entry), color=C.COLOR_SKIP) else: entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader) self._entries.append(entry_obj) diff --git a/lib/ansible/plugins/callback/default.py b/lib/ansible/plugins/callback/default.py index e515945bba5..421104ee837 100644 --- a/lib/ansible/plugins/callback/default.py +++ b/lib/ansible/plugins/callback/default.py @@ -44,7 +44,7 @@ class CallbackModule(CallbackBase): else: msg = "An exception occurred during task execution. 
The full traceback is:\n" + result._result['exception'] - self._display.display(msg, color='red') + self._display.display(msg, color=C.COLOR_ERROR) # finally, remove the exception from the result so it's not shown every time del result._result['exception'] @@ -53,12 +53,12 @@ class CallbackModule(CallbackBase): self._process_items(result) else: if delegated_vars: - self._display.display("fatal: [%s -> %s]: FAILED! => %s" % (result._host.get_name(), delegated_vars['ansible_host'], self._dump_results(result._result)), color='red') + self._display.display("fatal: [%s -> %s]: FAILED! => %s" % (result._host.get_name(), delegated_vars['ansible_host'], self._dump_results(result._result)), color=C.COLOR_ERROR) else: - self._display.display("fatal: [%s]: FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result)), color='red') + self._display.display("fatal: [%s]: FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result)), color=C.COLOR_ERROR) if result._task.ignore_errors: - self._display.display("...ignoring", color='cyan') + self._display.display("...ignoring", color=C.COLOR_SKIP) def v2_runner_on_ok(self, result): @@ -71,13 +71,13 @@ class CallbackModule(CallbackBase): msg = "changed: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host']) else: msg = "changed: [%s]" % result._host.get_name() - color = 'yellow' + color = C.COLOR_CHANGED else: if delegated_vars: msg = "ok: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host']) else: msg = "ok: [%s]" % result._host.get_name() - color = 'green' + color = C.COLOR_OK if result._task.loop and 'results' in result._result: self._process_items(result) @@ -97,17 +97,17 @@ class CallbackModule(CallbackBase): msg = "skipping: [%s]" % result._host.get_name() if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result: msg += " => %s" % self._dump_results(result._result) - self._display.display(msg, color='cyan') + self._display.display(msg, color=C.COLOR_SKIP) def v2_runner_on_unreachable(self, result): delegated_vars = result._result.get('_ansible_delegated_vars', None) if delegated_vars: - self._display.display("fatal: [%s -> %s]: UNREACHABLE! => %s" % (result._host.get_name(), delegated_vars['ansible_host'], self._dump_results(result._result)), color='red') + self._display.display("fatal: [%s -> %s]: UNREACHABLE! => %s" % (result._host.get_name(), delegated_vars['ansible_host'], self._dump_results(result._result)), color=C.COLOR_ERROR) else: - self._display.display("fatal: [%s]: UNREACHABLE! => %s" % (result._host.get_name(), self._dump_results(result._result)), color='red') + self._display.display("fatal: [%s]: UNREACHABLE! 
=> %s" % (result._host.get_name(), self._dump_results(result._result)), color=C.COLOR_ERROR) def v2_playbook_on_no_hosts_matched(self): - self._display.display("skipping: no hosts matched", color='cyan') + self._display.display("skipping: no hosts matched", color=C.COLOR_SKIP) def v2_playbook_on_no_hosts_remaining(self): self._display.banner("NO MORE HOSTS LEFT") @@ -117,7 +117,7 @@ class CallbackModule(CallbackBase): if self._display.verbosity > 2: path = task.get_path() if path: - self._display.display("task path: %s" % path, color='dark gray') + self._display.display("task path: %s" % path, color=C.COLOR_DEBUG) def v2_playbook_on_cleanup_task_start(self, task): self._display.banner("CLEANUP TASK [%s]" % task.get_name().strip()) @@ -155,13 +155,13 @@ class CallbackModule(CallbackBase): msg = "changed: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host']) else: msg = "changed: [%s]" % result._host.get_name() - color = 'yellow' + color = C.COLOR_CHANGED else: if delegated_vars: msg = "ok: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host']) else: msg = "ok: [%s]" % result._host.get_name() - color = 'green' + color = C.COLOR_OK msg += " => (item=%s)" % (result._result['item'],) @@ -179,15 +179,15 @@ class CallbackModule(CallbackBase): else: msg = "An exception occurred during task execution. The full traceback is:\n" + result._result['exception'] - self._display.display(msg, color='red') + self._display.display(msg, color=C.COLOR_ERROR) # finally, remove the exception from the result so it's not shown every time del result._result['exception'] if delegated_vars: - self._display.display("failed: [%s -> %s] => (item=%s) => %s" % (result._host.get_name(), delegated_vars['ansible_host'], result._result['item'], self._dump_results(result._result)), color='red') + self._display.display("failed: [%s -> %s] => (item=%s) => %s" % (result._host.get_name(), delegated_vars['ansible_host'], result._result['item'], self._dump_results(result._result)), color=C.COLOR_ERROR) else: - self._display.display("failed: [%s] => (item=%s) => %s" % (result._host.get_name(), result._result['item'], self._dump_results(result._result)), color='red') + self._display.display("failed: [%s] => (item=%s) => %s" % (result._host.get_name(), result._result['item'], self._dump_results(result._result)), color=C.COLOR_ERROR) self._handle_warnings(result._result) @@ -195,12 +195,12 @@ class CallbackModule(CallbackBase): msg = "skipping: [%s] => (item=%s) " % (result._host.get_name(), result._result['item']) if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result: msg += " => %s" % self._dump_results(result._result) - self._display.display(msg, color='cyan') + self._display.display(msg, color=C.COLOR_SKIP) def v2_playbook_on_include(self, included_file): msg = 'included: %s for %s' % (included_file._filename, ", ".join([h.name for h in included_file._hosts])) - color = 'cyan' - self._display.display(msg, color='cyan') + color = C.COLOR_SKIP + self._display.display(msg, color=C.COLOR_SKIP) def v2_playbook_on_stats(self, stats): self._display.banner("PLAY RECAP") @@ -211,10 +211,10 @@ class CallbackModule(CallbackBase): self._display.display(u"%s : %s %s %s %s" % ( hostcolor(h, t), - colorize(u'ok', t['ok'], 'green'), - colorize(u'changed', t['changed'], 'yellow'), - colorize(u'unreachable', t['unreachable'], 'red'), - colorize(u'failed', t['failures'], 'red')), + colorize(u'ok', t['ok'], C.COLOR_OK), + 
colorize(u'changed', t['changed'], C.COLOR_CHANGED), + colorize(u'unreachable', t['unreachable'], C.COLOR_UNREACHABLE), + colorize(u'failed', t['failures'], C.COLOR_ERROR)), screen_only=True ) diff --git a/lib/ansible/plugins/callback/minimal.py b/lib/ansible/plugins/callback/minimal.py index 71f9f5dfeef..9fa257af747 100644 --- a/lib/ansible/plugins/callback/minimal.py +++ b/lib/ansible/plugins/callback/minimal.py @@ -53,29 +53,32 @@ class CallbackModule(CallbackBase): else: msg = "An exception occurred during task execution. The full traceback is:\n" + result._result['exception'] - self._display.display(msg, color='red') + self._display.display(msg, color=C.COLOR_ERROR) # finally, remove the exception from the result so it's not shown every time del result._result['exception'] if result._task.action in C.MODULE_NO_JSON: - self._display.display(self._command_generic_msg(result._host.get_name(), result._result, "FAILED"), color='red') + self._display.display(self._command_generic_msg(result._host.get_name(), result._result, "FAILED"), color=C.COLOR_ERROR) else: - self._display.display("%s | FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result, indent=4)), color='red') + self._display.display("%s | FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result, indent=4)), color=C.COLOR_ERROR) def v2_runner_on_ok(self, result): self._clean_results(result._result, result._task.action) if result._task.action in C.MODULE_NO_JSON: - self._display.display(self._command_generic_msg(result._host.get_name(), result._result, "SUCCESS"), color='green') + self._display.display(self._command_generic_msg(result._host.get_name(), result._result, "SUCCESS"), color=C.COLOR_OK) else: - self._display.display("%s | SUCCESS => %s" % (result._host.get_name(), self._dump_results(result._result, indent=4)), color='green') + if 'changed' in result._result and result._result['changed']: + self._display.display("%s | SUCCESS => %s" % (result._host.get_name(), self._dump_results(result._result, indent=4)), color=C.COLOR_CHANGED) + else: + self._display.display("%s | SUCCESS => %s" % (result._host.get_name(), self._dump_results(result._result, indent=4)), color=C.COLOR_OK) self._handle_warnings(result._result) def v2_runner_on_skipped(self, result): - self._display.display("%s | SKIPPED" % (result._host.get_name()), color='cyan') + self._display.display("%s | SKIPPED" % (result._host.get_name()), color=C.COLOR_SKIP) def v2_runner_on_unreachable(self, result): - self._display.display("%s | UNREACHABLE! => %s" % (result._host.get_name(), self._dump_results(result._result, indent=4)), color='yellow') + self._display.display("%s | UNREACHABLE! => %s" % (result._host.get_name(), self._dump_results(result._result, indent=4)), color=C.COLOR_UNREACHABLE) def v2_on_file_diff(self, result): if 'diff' in result._result and result._result['diff']: diff --git a/lib/ansible/plugins/callback/oneline.py b/lib/ansible/plugins/callback/oneline.py index a99b680c05c..0f6283fd441 100644 --- a/lib/ansible/plugins/callback/oneline.py +++ b/lib/ansible/plugins/callback/oneline.py @@ -52,24 +52,24 @@ class CallbackModule(CallbackBase): msg = "An exception occurred during task execution. 
The full traceback is:\n" + result._result['exception'].replace('\n','') if result._task.action in C.MODULE_NO_JSON: - self._display.display(self._command_generic_msg(result._host.get_name(), result._result,'FAILED'), color='red') + self._display.display(self._command_generic_msg(result._host.get_name(), result._result,'FAILED'), color=C.COLOR_ERROR) else: - self._display.display(msg, color='red') + self._display.display(msg, color=C.COLOR_ERROR) # finally, remove the exception from the result so it's not shown every time del result._result['exception'] - self._display.display("%s | FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result, indent=0).replace('\n','')), color='red') + self._display.display("%s | FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result, indent=0).replace('\n','')), color=C.COLOR_ERROR) def v2_runner_on_ok(self, result): if result._task.action in C.MODULE_NO_JSON: - self._display.display(self._command_generic_msg(result._host.get_name(), result._result,'SUCCESS'), color='green') + self._display.display(self._command_generic_msg(result._host.get_name(), result._result,'SUCCESS'), color=C.COLOR_OK) else: - self._display.display("%s | SUCCESS => %s" % (result._host.get_name(), self._dump_results(result._result, indent=0).replace('\n','')), color='green') + self._display.display("%s | SUCCESS => %s" % (result._host.get_name(), self._dump_results(result._result, indent=0).replace('\n','')), color=C.COLOR_OK) def v2_runner_on_unreachable(self, result): - self._display.display("%s | UNREACHABLE!" % result._host.get_name(), color='yellow') + self._display.display("%s | UNREACHABLE!" % result._host.get_name(), color=C.COLOR_UNREACHABLE) def v2_runner_on_skipped(self, result): - self._display.display("%s | SKIPPED" % (result._host.get_name()), color='cyan') + self._display.display("%s | SKIPPED" % (result._host.get_name()), color=C.COLOR_SKIP) diff --git a/lib/ansible/utils/color.py b/lib/ansible/utils/color.py index 55060ace040..81a05d749e1 100644 --- a/lib/ansible/utils/color.py +++ b/lib/ansible/utils/color.py @@ -62,7 +62,8 @@ codeCodes = { 'purple': u'0;35', 'bright red': u'1;31', 'yellow': u'0;33', 'bright purple': u'1;35', 'dark gray': u'1;30', 'bright yellow': u'1;33', - 'normal': u'0' + 'magenta': u'0;35', 'bright magenta': u'1;35', + 'normal': u'0' , } def stringc(text, color): diff --git a/lib/ansible/utils/display.py b/lib/ansible/utils/display.py index 3d51f17de47..8700a510186 100644 --- a/lib/ansible/utils/display.py +++ b/lib/ansible/utils/display.py @@ -145,7 +145,7 @@ class Display: # characters that are invalid in the user's locale msg2 = to_unicode(msg2, self._output_encoding(stderr=stderr)) - if color == 'red': + if color == C.COLOR_ERROR: logger.error(msg2) else: logger.info(msg2) @@ -168,7 +168,7 @@ class Display: def debug(self, msg): if C.DEFAULT_DEBUG: debug_lock.acquire() - self.display("%6d %0.5f: %s" % (os.getpid(), time.time(), msg), color='dark gray') + self.display("%6d %0.5f: %s" % (os.getpid(), time.time(), msg), color=C.COLOR_DEBUG) debug_lock.release() def verbose(self, msg, host=None, caplevel=2): @@ -176,9 +176,9 @@ class Display: #msg = utils.sanitize_output(msg) if self.verbosity > caplevel: if host is None: - self.display(msg, color='blue') + self.display(msg, color=C.COLOR_VERBOSE) else: - self.display("<%s> %s" % (host, msg), color='blue', screen_only=True) + self.display("<%s> %s" % (host, msg), color=C.COLOR_VERBOSE, screen_only=True) def deprecated(self, msg, version=None, 
removed=False): ''' used to print out a deprecation message.''' @@ -199,7 +199,7 @@ class Display: new_msg = "\n".join(wrapped) + "\n" if new_msg not in self._deprecations: - self.display(new_msg.strip(), color='purple', stderr=True) + self.display(new_msg.strip(), color=C.COLOR_DEPRECATE, stderr=True) self._deprecations[new_msg] = 1 def warning(self, msg): @@ -207,7 +207,7 @@ class Display: wrapped = textwrap.wrap(new_msg, self.columns) new_msg = "\n".join(wrapped) + "\n" if new_msg not in self._warns: - self.display(new_msg, color='bright purple', stderr=True) + self.display(new_msg, color=C.COLOR_WARN, stderr=True) self._warns[new_msg] = 1 def system_warning(self, msg): @@ -258,7 +258,7 @@ class Display: else: new_msg = msg if new_msg not in self._errors: - self.display(new_msg, color='red', stderr=True) + self.display(new_msg, color=C.COLOR_ERROR, stderr=True) self._errors[new_msg] = 1 @staticmethod From 5accc9858739d2184235bf8722b83ff7bcc97056 Mon Sep 17 00:00:00 2001 From: mgarstecki Date: Wed, 30 Dec 2015 11:57:12 +0100 Subject: [PATCH 0269/1113] Correction of a double negation The sentence seemed to imply that return codes from modules are significant, while they are not. The second part of the sentence confirms this, as it advises to use standard return codes only for future proofing. --- docsite/rst/developing_modules.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docsite/rst/developing_modules.rst b/docsite/rst/developing_modules.rst index d3781b2f7fd..5d664d56313 100644 --- a/docsite/rst/developing_modules.rst +++ b/docsite/rst/developing_modules.rst @@ -347,7 +347,7 @@ and guidelines: * In the event of failure, a key of 'failed' should be included, along with a string explanation in 'msg'. Modules that raise tracebacks (stacktraces) are generally considered 'poor' modules, though Ansible can deal with these returns and will automatically convert anything unparseable into a failed result. If you are using the AnsibleModule common Python code, the 'failed' element will be included for you automatically when you call 'fail_json'. -* Return codes from modules are not actually not significant, but continue on with 0=success and non-zero=failure for reasons of future proofing. +* Return codes from modules are actually not significant, but continue on with 0=success and non-zero=failure for reasons of future proofing. * As results from many hosts will be aggregated at once, modules should return only relevant output. Returning the entire contents of a log file is generally bad form. From 946b82bef71d3b2d4ecf07ec937b650634bc84a0 Mon Sep 17 00:00:00 2001 From: Eric Feliksik Date: Wed, 30 Dec 2015 18:21:34 +0100 Subject: [PATCH 0270/1113] shred ansible-vault tmp_file. Also when editor is interruped. 
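Stepping back to the configurable-colors change in PATCH 0268 above: the ``C.COLOR_*`` constants are plain color names resolved through the existing ``stringc()`` helper, so callers keep the defaults unless a ``[colors]`` section (or the matching environment variable) overrides them. A minimal sketch of the intended usage::

    from ansible import constants as C
    from ansible.utils.color import stringc

    # stringc() wraps the text in the ANSI escape sequence for the named color
    # (unless color output is disabled), so these print green and yellow by default
    print(stringc("ok: [localhost]", C.COLOR_OK))
    print(stringc("changed: [localhost]", C.COLOR_CHANGED))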
--- lib/ansible/parsing/vault/__init__.py | 35 ++++++++++++++++++++++++--- 1 file changed, 31 insertions(+), 4 deletions(-) diff --git a/lib/ansible/parsing/vault/__init__.py b/lib/ansible/parsing/vault/__init__.py index d8cf66feca4..b7304d156fe 100644 --- a/lib/ansible/parsing/vault/__init__.py +++ b/lib/ansible/parsing/vault/__init__.py @@ -219,7 +219,27 @@ class VaultEditor: def __init__(self, password): self.vault = VaultLib(password) - + + def _shred_file(self, tmp_path): + """securely destroy a decrypted file.""" + def generate_data(length): + import string, random + chars = string.ascii_lowercase + string.ascii_uppercase + string.digits + return ''.join(random.SystemRandom().choice(chars) for _ in range(length)) + + if not os.path.isfile(tmp_path): + # file is already gone + return + + ld = os.path.getsize(tmp_path) + passes = 3 + with open(tmp_path, "w") as fh: + for _ in range(int(passes)): + data = generate_data(ld) + fh.write(data) + fh.seek(0, 0) + os.remove(tmp_path) + def _edit_file_helper(self, filename, existing_data=None, force_save=False): # Create a tempfile @@ -229,12 +249,18 @@ class VaultEditor: self.write_data(existing_data, tmp_path) # drop the user into an editor on the tmp file - call(self._editor_shell_command(tmp_path)) + try: + call(self._editor_shell_command(tmp_path)) + except: + # whatever happens, destroy the decrypted file + self._shred_file(tmp_path) + raise + tmpdata = self.read_data(tmp_path) # Do nothing if the content has not changed if existing_data == tmpdata and not force_save: - os.remove(tmp_path) + self._shred_file(tmp_path) return # encrypt new data and write out to tmp @@ -329,7 +355,7 @@ class VaultEditor: sys.stdout.write(bytes) else: if os.path.isfile(filename): - os.remove(filename) + self._shred_file(filename) with open(filename, "wb") as fh: fh.write(bytes) @@ -338,6 +364,7 @@ class VaultEditor: # overwrite dest with src if os.path.isfile(dest): prev = os.stat(dest) + # old file 'dest' was encrypted, no need to _shred_file os.remove(dest) shutil.move(src, dest) From e39e8ba308364f16e3b74db96b15415ab97b5f52 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Wed, 30 Dec 2015 13:49:39 -0500 Subject: [PATCH 0271/1113] Fix logic mistake in unarchive action plugin --- lib/ansible/plugins/action/unarchive.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/ansible/plugins/action/unarchive.py b/lib/ansible/plugins/action/unarchive.py index cd89b936fed..b6c43a3c595 100644 --- a/lib/ansible/plugins/action/unarchive.py +++ b/lib/ansible/plugins/action/unarchive.py @@ -69,13 +69,13 @@ class ActionModule(ActionBase): source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', source) remote_checksum = self._remote_checksum(dest, all_vars=task_vars) - if remote_checksum != '3': + if remote_checksum == '4': result['failed'] = True - result['msg'] = "dest '%s' must be an existing dir" % dest + result['msg'] = "python isn't present on the system. Unable to compute checksum" return result - elif remote_checksum == '4': + elif remote_checksum != '3': result['failed'] = True - result['msg'] = "python isn't present on the system. 
Unable to compute checksum" + result['msg'] = "dest '%s' must be an existing dir" % dest return result if copy: From 5c34be15b1c800a513a88005c6e6b05f360dfef1 Mon Sep 17 00:00:00 2001 From: Thilo Uttendorfer Date: Thu, 31 Dec 2015 02:31:38 +0100 Subject: [PATCH 0272/1113] Fix unsupported format character --- lib/ansible/utils/module_docs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/utils/module_docs.py b/lib/ansible/utils/module_docs.py index 4a90c3caca1..14a5d030565 100755 --- a/lib/ansible/utils/module_docs.py +++ b/lib/ansible/utils/module_docs.py @@ -67,7 +67,7 @@ def get_docstring(filename, verbose=False): theid = t.id except AttributeError as e: # skip errors can happen when trying to use the normal code - display.warning("Failed to assign id for %t on %s, skipping" % (t, filename)) + display.warning("Failed to assign id for %s on %s, skipping" % (t, filename)) continue if 'DOCUMENTATION' in theid: From c4d2dbfcdbf8743760d658f1bcbec23e912514a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Yannig=20Perr=C3=A9?= Date: Fri, 1 Jan 2016 15:55:51 +0100 Subject: [PATCH 0273/1113] Replace to_string by to_unicode. Fix https://github.com/ansible/ansible/issues/13707 --- lib/ansible/inventory/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/inventory/__init__.py b/lib/ansible/inventory/__init__.py index 095118e50eb..885005960f5 100644 --- a/lib/ansible/inventory/__init__.py +++ b/lib/ansible/inventory/__init__.py @@ -112,7 +112,7 @@ class Inventory(object): try: (host, port) = parse_address(h, allow_ranges=False) except AnsibleError as e: - display.vvv("Unable to parse address from hostname, leaving unchanged: %s" % to_string(e)) + display.vvv("Unable to parse address from hostname, leaving unchanged: %s" % to_unicode(e)) host = h port = None all.add_host(Host(host, port)) From 6f2f7a79b34910a75e6eafde5a7872b3e7bcb770 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Fri, 1 Jan 2016 21:52:41 -0500 Subject: [PATCH 0274/1113] add support for diff in file settings this allows modules to report on what specifically changed when using common file functions --- lib/ansible/module_utils/basic.py | 61 ++++++++++++++++++++++++------- 1 file changed, 48 insertions(+), 13 deletions(-) diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py index 6fd382aa490..1366bfceb40 100644 --- a/lib/ansible/module_utils/basic.py +++ b/lib/ansible/module_utils/basic.py @@ -751,7 +751,7 @@ class AnsibleModule(object): context = self.selinux_default_context(path) return self.set_context_if_different(path, context, False) - def set_context_if_different(self, path, context, changed): + def set_context_if_different(self, path, context, changed, diff=None): if not HAVE_SELINUX or not self.selinux_enabled(): return changed @@ -772,6 +772,14 @@ class AnsibleModule(object): new_context[i] = cur_context[i] if cur_context != new_context: + if diff is not None: + if 'before' not in diff: + diff['before'] = {} + diff['before']['secontext'] = cur_context + if 'after' not in diff: + diff['after'] = {} + diff['after']['secontext'] = new_context + try: if self.check_mode: return True @@ -785,7 +793,7 @@ class AnsibleModule(object): changed = True return changed - def set_owner_if_different(self, path, owner, changed): + def set_owner_if_different(self, path, owner, changed, diff=None): path = os.path.expanduser(path) if owner is None: return changed @@ -798,6 +806,15 @@ class AnsibleModule(object): except KeyError: self.fail_json(path=path, 
msg='chown failed: failed to look up user %s' % owner) if orig_uid != uid: + + if diff is not None: + if 'before' not in diff: + diff['before'] = {} + diff['before']['owner'] = orig_uid + if 'after' not in diff: + diff['after'] = {} + diff['after']['owner'] = uid + if self.check_mode: return True try: @@ -807,7 +824,7 @@ class AnsibleModule(object): changed = True return changed - def set_group_if_different(self, path, group, changed): + def set_group_if_different(self, path, group, changed, diff=None): path = os.path.expanduser(path) if group is None: return changed @@ -820,6 +837,15 @@ class AnsibleModule(object): except KeyError: self.fail_json(path=path, msg='chgrp failed: failed to look up group %s' % group) if orig_gid != gid: + + if diff is not None: + if 'before' not in diff: + diff['before'] = {} + diff['before']['group'] = orig_gid + if 'after' not in diff: + diff['after'] = {} + diff['after']['group'] = gid + if self.check_mode: return True try: @@ -829,7 +855,7 @@ class AnsibleModule(object): changed = True return changed - def set_mode_if_different(self, path, mode, changed): + def set_mode_if_different(self, path, mode, changed, diff=None): path = os.path.expanduser(path) path_stat = os.lstat(path) @@ -851,6 +877,15 @@ class AnsibleModule(object): prev_mode = stat.S_IMODE(path_stat.st_mode) if prev_mode != mode: + + if diff is not None: + if 'before' not in diff: + diff['before'] = {} + diff['before']['mode'] = prev_mode + if 'after' not in diff: + diff['after'] = {} + diff['after']['mode'] = mode + if self.check_mode: return True # FIXME: comparison against string above will cause this to be executed @@ -984,27 +1019,27 @@ class AnsibleModule(object): or_reduce = lambda mode, perm: mode | user_perms_to_modes[user][perm] return reduce(or_reduce, perms, 0) - def set_fs_attributes_if_different(self, file_args, changed): + def set_fs_attributes_if_different(self, file_args, changed, diff=None): # set modes owners and context as needed changed = self.set_context_if_different( - file_args['path'], file_args['secontext'], changed + file_args['path'], file_args['secontext'], changed, diff ) changed = self.set_owner_if_different( - file_args['path'], file_args['owner'], changed + file_args['path'], file_args['owner'], changed, diff ) changed = self.set_group_if_different( - file_args['path'], file_args['group'], changed + file_args['path'], file_args['group'], changed, diff ) changed = self.set_mode_if_different( - file_args['path'], file_args['mode'], changed + file_args['path'], file_args['mode'], changed, diff ) return changed - def set_directory_attributes_if_different(self, file_args, changed): - return self.set_fs_attributes_if_different(file_args, changed) + def set_directory_attributes_if_different(self, file_args, changed, diff=None): + return self.set_fs_attributes_if_different(file_args, changed, diff) - def set_file_attributes_if_different(self, file_args, changed): - return self.set_fs_attributes_if_different(file_args, changed) + def set_file_attributes_if_different(self, file_args, changed, diff=None): + return self.set_fs_attributes_if_different(file_args, changed, diff) def add_path_info(self, kwargs): ''' From 210cf06d9ac8e62b15d6f34e9c63c1b98986a1d5 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Sat, 2 Jan 2016 00:31:09 -0500 Subject: [PATCH 0275/1113] Tweak how strategies evaluate failed hosts via the iterator and bug fixes * Added additional methods to the iterator code to assess host failures while also taking into account the block rescue/always states * 
Fixed bugs in the free strategy, where results were not always being processed after being collected * Added some prettier printing to the state output from iterator Fixes #13699 --- lib/ansible/executor/play_iterator.py | 46 ++++++++++++++++++++++++-- lib/ansible/plugins/strategy/free.py | 12 ++----- lib/ansible/plugins/strategy/linear.py | 5 +-- 3 files changed, 49 insertions(+), 14 deletions(-) diff --git a/lib/ansible/executor/play_iterator.py b/lib/ansible/executor/play_iterator.py index 534f216c30a..147e46e5aa7 100644 --- a/lib/ansible/executor/play_iterator.py +++ b/lib/ansible/executor/play_iterator.py @@ -57,14 +57,32 @@ class HostState: self.always_child_state = None def __repr__(self): - return "HOST STATE: block=%d, task=%d, rescue=%d, always=%d, role=%s, run_state=%d, fail_state=%d, pending_setup=%s, tasks child state? %s, rescue child state? %s, always child state? %s" % ( + def _run_state_to_string(n): + states = ["ITERATING_SETUP", "ITERATING_TASKS", "ITERATING_RESCUE", "ITERATING_ALWAYS", "ITERATING_COMPLETE"] + try: + return states[n] + except IndexError: + return "UNKNOWN STATE" + + def _failed_state_to_string(n): + states = {1:"FAILED_SETUP", 2:"FAILED_TASKS", 4:"FAILED_RESCUE", 8:"FAILED_ALWAYS"} + if n == 0: + return "FAILED_NONE" + else: + ret = [] + for i in (1, 2, 4, 8): + if n & i: + ret.append(states[i]) + return "|".join(ret) + + return "HOST STATE: block=%d, task=%d, rescue=%d, always=%d, role=%s, run_state=%s, fail_state=%s, pending_setup=%s, tasks child state? %s, rescue child state? %s, always child state? %s" % ( self.cur_block, self.cur_regular_task, self.cur_rescue_task, self.cur_always_task, self.cur_role, - self.run_state, - self.fail_state, + _run_state_to_string(self.run_state), + _failed_state_to_string(self.fail_state), self.pending_setup, self.tasks_child_state, self.rescue_child_state, @@ -347,6 +365,28 @@ class PlayIterator: def get_failed_hosts(self): return dict((host, True) for (host, state) in iteritems(self._host_states) if state.run_state == self.ITERATING_COMPLETE and state.fail_state != self.FAILED_NONE) + def _check_failed_state(self, state): + if state is None: + return False + elif state.run_state == self.ITERATING_TASKS and self._check_failed_state(state.tasks_child_state): + return True + elif state.run_state == self.ITERATING_RESCUE and self._check_failed_state(state.rescue_child_state): + return True + elif state.run_state == self.ITERATING_ALWAYS and self._check_failed_state(state.always_child_state): + return True + elif state.run_state == self.ITERATING_COMPLETE and state.fail_state != self.FAILED_NONE: + if state.run_state == self.ITERATING_RESCUE and state.fail_state&self.FAILED_RESCUE == 0: + return False + elif state.run_state == self.ITERATING_ALWAYS and state.fail_state&self.FAILED_ALWAYS == 0: + return False + else: + return True + return False + + def is_failed(self, host): + s = self.get_host_state(host) + return self._check_failed_state(s) + def get_original_task(self, host, task): ''' Finds the task in the task list which matches the UUID of the given task. 
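A note on the new state printing above: ``fail_state`` is a bitmask built from the FAILED_* flags, which is why ``_failed_state_to_string()`` tests each bit and joins the matching names with ``|``. The same decoding as a standalone snippet::

    FAILED_SETUP, FAILED_TASKS, FAILED_RESCUE, FAILED_ALWAYS = 1, 2, 4, 8

    def failed_state_to_string(n):
        names = {1: "FAILED_SETUP", 2: "FAILED_TASKS", 4: "FAILED_RESCUE", 8: "FAILED_ALWAYS"}
        if n == 0:
            return "FAILED_NONE"
        return "|".join(names[i] for i in (1, 2, 4, 8) if n & i)

    print(failed_state_to_string(FAILED_TASKS | FAILED_RESCUE))
    # -> FAILED_TASKS|FAILED_RESCUE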
diff --git a/lib/ansible/plugins/strategy/free.py b/lib/ansible/plugins/strategy/free.py index f4fc1226a1f..976d33abba0 100644 --- a/lib/ansible/plugins/strategy/free.py +++ b/lib/ansible/plugins/strategy/free.py @@ -78,7 +78,7 @@ class StrategyModule(StrategyBase): (state, task) = iterator.get_next_task_for_host(host, peek=True) display.debug("free host state: %s" % state) display.debug("free host task: %s" % task) - if host_name not in self._tqm._failed_hosts and host_name not in self._tqm._unreachable_hosts and task: + if not iterator.is_failed(host) and host_name not in self._tqm._unreachable_hosts and task: # set the flag so the outer loop knows we've still found # some work which needs to be done @@ -135,7 +135,7 @@ class StrategyModule(StrategyBase): if last_host == starting_host: break - results = self._process_pending_results(iterator) + results = self._wait_on_pending_results(iterator) host_results.extend(results) try: @@ -176,13 +176,7 @@ class StrategyModule(StrategyBase): display.debug("done adding collected blocks to iterator") # pause briefly so we don't spin lock - time.sleep(0.05) - - try: - results = self._wait_on_pending_results(iterator) - host_results.extend(results) - except Exception as e: - pass + time.sleep(0.001) # run the base class run() method, which executes the cleanup function # and runs any outstanding handlers which have been triggered diff --git a/lib/ansible/plugins/strategy/linear.py b/lib/ansible/plugins/strategy/linear.py index 7bb227dbaea..bfa2c37ce43 100644 --- a/lib/ansible/plugins/strategy/linear.py +++ b/lib/ansible/plugins/strategy/linear.py @@ -54,7 +54,8 @@ class StrategyModule(StrategyBase): host_tasks = {} display.debug("building list of next tasks for hosts") for host in hosts: - host_tasks[host.name] = iterator.get_next_task_for_host(host, peek=True) + if not iterator.is_failed(host): + host_tasks[host.name] = iterator.get_next_task_for_host(host, peek=True) display.debug("done building task lists") num_setups = 0 @@ -98,7 +99,7 @@ class StrategyModule(StrategyBase): rvals = [] display.debug("starting to advance hosts") for host in hosts: - host_state_task = host_tasks[host.name] + host_state_task = host_tasks.get(host.name) if host_state_task is None: continue (s, t) = host_state_task From 7193d27acc7719b25b70eb4709964d0c93796162 Mon Sep 17 00:00:00 2001 From: Eric Feliksik Date: Mon, 4 Jan 2016 17:19:35 +0100 Subject: [PATCH 0276/1113] add os.fsync() so that the shredding data (hopefully) hits the drive --- lib/ansible/parsing/vault/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/ansible/parsing/vault/__init__.py b/lib/ansible/parsing/vault/__init__.py index b7304d156fe..1eca0cd5714 100644 --- a/lib/ansible/parsing/vault/__init__.py +++ b/lib/ansible/parsing/vault/__init__.py @@ -235,9 +235,10 @@ class VaultEditor: passes = 3 with open(tmp_path, "w") as fh: for _ in range(int(passes)): + fh.seek(0, 0) data = generate_data(ld) fh.write(data) - fh.seek(0, 0) + os.fsync(fh) os.remove(tmp_path) def _edit_file_helper(self, filename, existing_data=None, force_save=False): From 8599c566701582024c6eaeeb5cf52d249f48a49e Mon Sep 17 00:00:00 2001 From: Michael Scherer Date: Mon, 4 Jan 2016 17:46:40 +0100 Subject: [PATCH 0277/1113] Do not set 'changed' to True when using group_by Since group_by is not changing in any way to the remote system, there is no change. This also make things more consistent with the set_fact plugin. 
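On the ``os.fsync()`` addition in PATCH 0276 above: data written through a Python file object first sits in userspace and kernel buffers, so without an explicit sync the overwriting passes may never reach the device before the file is unlinked. The general write-then-sync pattern looks like this (a generic sketch, not the vault code itself)::

    import os

    def overwrite_and_sync(path, data):
        with open(path, "wb") as fh:
            fh.write(data)
            fh.flush()               # push Python's buffer down to the OS
            os.fsync(fh.fileno())    # ask the kernel to flush it to the storage device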
--- lib/ansible/plugins/action/group_by.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/plugins/action/group_by.py b/lib/ansible/plugins/action/group_by.py index a891d3c70d5..99f9db2a88c 100644 --- a/lib/ansible/plugins/action/group_by.py +++ b/lib/ansible/plugins/action/group_by.py @@ -40,6 +40,6 @@ class ActionModule(ActionBase): group_name = self._task.args.get('key') group_name = group_name.replace(' ','-') - result['changed'] = True + result['changed'] = False result['add_group'] = group_name return result From 1e911375e850e79295d053f3e3c45c9d9d247159 Mon Sep 17 00:00:00 2001 From: Eric Feliksik Date: Mon, 4 Jan 2016 18:13:59 +0100 Subject: [PATCH 0278/1113] add docs, remove unnecessary int() cast --- lib/ansible/parsing/vault/__init__.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/lib/ansible/parsing/vault/__init__.py b/lib/ansible/parsing/vault/__init__.py index 1eca0cd5714..28e819860ae 100644 --- a/lib/ansible/parsing/vault/__init__.py +++ b/lib/ansible/parsing/vault/__init__.py @@ -221,7 +221,22 @@ class VaultEditor: self.vault = VaultLib(password) def _shred_file(self, tmp_path): - """securely destroy a decrypted file.""" + """Securely destroy a decrypted file + + Inspired by unix `shred', try to destroy the secrets "so that they can be + recovered only with great difficulty with specialised hardware, if at all". + + See https://github.com/ansible/ansible/pull/13700 . + + Note that: + - For flash: overwriting would have no effect (due to wear leveling). But the + added disk wear is considered insignificant. + - For other storage systems: the filesystem lies to the vfs (kernel), the disk + driver lies to the filesystem and the disk lies to the driver. But it's better + than nothing. + - most tmp dirs are now tmpfs (ramdisks), for which this is a non-issue. + """ + def generate_data(length): import string, random chars = string.ascii_lowercase + string.ascii_uppercase + string.digits @@ -234,7 +249,7 @@ class VaultEditor: ld = os.path.getsize(tmp_path) passes = 3 with open(tmp_path, "w") as fh: - for _ in range(int(passes)): + for _ in range(passes): fh.seek(0, 0) data = generate_data(ld) fh.write(data) From de529c17340074b1d96937cf4d688da0a7e3bd31 Mon Sep 17 00:00:00 2001 From: "Fuentes, Christopher" Date: Mon, 4 Jan 2016 13:52:06 -0500 Subject: [PATCH 0279/1113] minor grammar error was making me pull hair out --- docsite/rst/faq.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docsite/rst/faq.rst b/docsite/rst/faq.rst index 90b9a1cb09e..e51a1751fee 100644 --- a/docsite/rst/faq.rst +++ b/docsite/rst/faq.rst @@ -38,7 +38,7 @@ You can also dictate the connection type to be used, if you want:: foo.example.com bar.example.com -You may also wish to keep these in group variables instead, or file in them in a group_vars/ file. +You may also wish to keep these in group variables instead, or file them in a group_vars/ file. See the rest of the documentation for more information about how to organize variables. .. 
_use_ssh: From 151e09d129d63ce485d42d3f6cf0915bb8bd8cee Mon Sep 17 00:00:00 2001 From: Eric Feliksik Date: Tue, 5 Jan 2016 01:34:45 +0100 Subject: [PATCH 0280/1113] use unix shred if possible, otherwise fast custom impl; do not shred encrypted file --- lib/ansible/parsing/vault/__init__.py | 90 ++++++++++++++++++--------- 1 file changed, 62 insertions(+), 28 deletions(-) diff --git a/lib/ansible/parsing/vault/__init__.py b/lib/ansible/parsing/vault/__init__.py index 28e819860ae..bcd038c8b8d 100644 --- a/lib/ansible/parsing/vault/__init__.py +++ b/lib/ansible/parsing/vault/__init__.py @@ -219,41 +219,67 @@ class VaultEditor: def __init__(self, password): self.vault = VaultLib(password) + + def _shred_file_custom(self, tmp_path): + """"Destroy a file, when shred (core-utils) is not available - def _shred_file(self, tmp_path): - """Securely destroy a decrypted file + Unix `shred' destroys files "so that they can be recovered only with great difficulty with + specialised hardware, if at all". It is based on the method from the paper + "Secure Deletion of Data from Magnetic and Solid-State Memory", + Proceedings of the Sixth USENIX Security Symposium (San Jose, California, July 22-25, 1996). - Inspired by unix `shred', try to destroy the secrets "so that they can be - recovered only with great difficulty with specialised hardware, if at all". + We do not go to that length to re-implement shred in Python; instead, overwriting with a block + of random data should suffice. See https://github.com/ansible/ansible/pull/13700 . - - Note that: - - For flash: overwriting would have no effect (due to wear leveling). But the - added disk wear is considered insignificant. - - For other storage systems: the filesystem lies to the vfs (kernel), the disk - driver lies to the filesystem and the disk lies to the driver. But it's better - than nothing. - - most tmp dirs are now tmpfs (ramdisks), for which this is a non-issue. """ - def generate_data(length): - import string, random - chars = string.ascii_lowercase + string.ascii_uppercase + string.digits - return ''.join(random.SystemRandom().choice(chars) for _ in range(length)) + file_len = os.path.getsize(tmp_path) - if not os.path.isfile(tmp_path): - # file is already gone - return - - ld = os.path.getsize(tmp_path) passes = 3 - with open(tmp_path, "w") as fh: + with open(tmp_path, "wb") as fh: for _ in range(passes): fh.seek(0, 0) - data = generate_data(ld) - fh.write(data) + # get a random chunk of data + data = os.urandom(min(1024*1024*2, file_len)) + bytes_todo = file_len + while bytes_todo > 0: + chunk = data[:bytes_todo] + fh.write(chunk) + bytes_todo -= len(chunk) + + assert(fh.tell() == file_len) os.fsync(fh) + + + def _shred_file(self, tmp_path): + """Securely destroy a decrypted file + + Note standard limitations of GNU shred apply (For flash, overwriting would have no effect + due to wear leveling; for other storage systems, the async kernel->filesystem->disk calls never + guarantee data hits the disk; etc). Furthermore, if your tmp dirs is on tmpfs (ramdisks), + it is a non-issue. + + Nevertheless, some form of overwriting the data (instead of just removing the fs index entry) is + a good idea. If shred is not available (e.g. on windows, or no core-utils installed), fall back on + a custom shredding method. + """ + + if not os.path.isfile(tmp_path): + # file is already gone + return + + try: + r = call(['shred', tmp_path]) + except OSError as e: + # shred is not available on this system, or some other error occured. 
+ self._shred_file_custom(tmp_path) + r = 0 + + if r != 0: + # we could not successfully execute unix shred; therefore, do custom shred. + self._shred_file_custom(tmp_path) + os.remove(tmp_path) def _edit_file_helper(self, filename, existing_data=None, force_save=False): @@ -262,7 +288,7 @@ class VaultEditor: _, tmp_path = tempfile.mkstemp() if existing_data: - self.write_data(existing_data, tmp_path) + self.write_data(existing_data, tmp_path, shred=False) # drop the user into an editor on the tmp file try: @@ -300,7 +326,7 @@ class VaultEditor: ciphertext = self.read_data(filename) plaintext = self.vault.decrypt(ciphertext) - self.write_data(plaintext, output_file or filename) + self.write_data(plaintext, output_file or filename, shred=False) def create_file(self, filename): """ create a new encrypted file """ @@ -365,13 +391,21 @@ class VaultEditor: return data - def write_data(self, data, filename): + def write_data(self, data, filename, shred=True): + """write data to given path + + if shred==True, make sure that the original data is first shredded so + that is cannot be recovered + """ bytes = to_bytes(data, errors='strict') if filename == '-': sys.stdout.write(bytes) else: if os.path.isfile(filename): - self._shred_file(filename) + if shred: + self._shred_file(filename) + else: + os.remove(filename) with open(filename, "wb") as fh: fh.write(bytes) From 0d7c3284595c34f53c903995b8dff5fc65303c89 Mon Sep 17 00:00:00 2001 From: John Mitchell Date: Mon, 4 Jan 2016 19:52:37 -0500 Subject: [PATCH 0281/1113] fixed css minification make target for docsite --- docsite/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docsite/Makefile b/docsite/Makefile index 92129f78514..15347f84bf9 100644 --- a/docsite/Makefile +++ b/docsite/Makefile @@ -43,4 +43,4 @@ modules: $(FORMATTER) ../hacking/templates/rst.j2 PYTHONPATH=../lib $(FORMATTER) -t rst --template-dir=../hacking/templates --module-dir=../lib/ansible/modules -o rst/ staticmin: - cat _themes/srtd/static/css/theme.css | sed -e 's/^[ \t]*//g; s/[ \t]*$$//g; s/\([:{;,]\) /\1/g; s/ {/{/g; s/\/\*.*\*\///g; /^$$/d' | sed -e :a -e '$$!N; s/\n\(.\)/\1/; ta' > _themes/srtd/static/css/theme.min.css + cat _themes/srtd/static/css/theme.css | sed -e 's/^[ ]*//g; s/[ ]*$$//g; s/\([:{;,]\) /\1/g; s/ {/{/g; s/\/\*.*\*\///g; /^$$/d' | sed -e :a -e '$$!N; s/\n\(.\)/\1/; ta' > _themes/srtd/static/css/theme.min.css From 692ef6dcc90cf696b4bc25bedb979150adf6e7b9 Mon Sep 17 00:00:00 2001 From: John Mitchell Date: Mon, 4 Jan 2016 19:58:51 -0500 Subject: [PATCH 0282/1113] made docsite ads configurable by marketing --- docsite/_themes/srtd/layout.html | 22 ++++++++++++---------- docsite/_themes/srtd/static/css/theme.css | 21 ++------------------- 2 files changed, 14 insertions(+), 29 deletions(-) diff --git a/docsite/_themes/srtd/layout.html b/docsite/_themes/srtd/layout.html index 16f0d8d2663..1408be8165d 100644 --- a/docsite/_themes/srtd/layout.html +++ b/docsite/_themes/srtd/layout.html @@ -166,7 +166,7 @@

- +
@@ -189,15 +189,17 @@
- - -
- -
-
- -
-
+ + {% include "breadcrumbs.html" %}
diff --git a/docsite/_themes/srtd/static/css/theme.css b/docsite/_themes/srtd/static/css/theme.css index 4f7cbc8caaf..246e513b799 100644 --- a/docsite/_themes/srtd/static/css/theme.css +++ b/docsite/_themes/srtd/static/css/theme.css @@ -4723,33 +4723,16 @@ span[id*='MathJax-Span'] { padding: 0.4045em 1.618em; } - .DocSiteBanner { - width: 100%; display: flex; display: -webkit-flex; + justify-content: center; + -webkit-justify-content: center; flex-wrap: wrap; -webkit-flex-wrap: wrap; - justify-content: space-between; - -webkit-justify-content: space-between; - background-color: #ff5850; margin-bottom: 25px; } .DocSiteBanner-imgWrapper { max-width: 100%; } - -@media screen and (max-width: 1403px) { - .DocSiteBanner { - width: 100%; - display: flex; - display: -webkit-flex; - flex-wrap: wrap; - -webkit-flex-wrap: wrap; - justify-content: center; - -webkit-justify-content: center; - background-color: #fff; - margin-bottom: 25px; - } -} From 1c3b16c2ddf42c687738687cbc1a708cd05d2112 Mon Sep 17 00:00:00 2001 From: John Mitchell Date: Mon, 4 Jan 2016 20:02:01 -0500 Subject: [PATCH 0283/1113] update copyright date --- docsite/_themes/srtd/footer.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docsite/_themes/srtd/footer.html b/docsite/_themes/srtd/footer.html index b70cfde7ad8..30b02a8978b 100644 --- a/docsite/_themes/srtd/footer.html +++ b/docsite/_themes/srtd/footer.html @@ -13,7 +13,7 @@

- © Copyright 2015 Ansible, Inc.. + © Copyright 2016 Ansible, Inc.. {%- if last_updated %} {% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %} From 559ba467c09b112ecd7dc8681888b6631fcacba3 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Tue, 22 Dec 2015 11:11:50 -0800 Subject: [PATCH 0284/1113] Revert "Convert to bytes later so that make_become_command can jsut operate on text type." This reverts commit c4da5840b5e38aea1740e68f7100256c93dfbb17. Going to do this in the connection plugins --- lib/ansible/plugins/action/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py index 5383f8afd43..e54898b6db3 100644 --- a/lib/ansible/plugins/action/__init__.py +++ b/lib/ansible/plugins/action/__init__.py @@ -488,6 +488,8 @@ class ActionBase(with_metaclass(ABCMeta, object)): verbatim, then this won't work. May have to use some sort of replacement strategy (python3 could use surrogateescape) ''' + # We may need to revisit this later. + cmd = to_bytes(cmd, errors='strict') if executable is not None: cmd = executable + ' -c ' + cmd @@ -504,7 +506,7 @@ class ActionBase(with_metaclass(ABCMeta, object)): cmd = self._play_context.make_become_cmd(cmd, executable=executable) display.debug("_low_level_execute_command(): executing: %s" % (cmd,)) - rc, stdout, stderr = self._connection.exec_command(to_bytes(cmd, errors='strict'), in_data=in_data, sudoable=sudoable) + rc, stdout, stderr = self._connection.exec_command(cmd, in_data=in_data, sudoable=sudoable) # stdout and stderr may be either a file-like or a bytes object. # Convert either one to a text type From 1ed3a018eb27dd06b08dbad57a162c2865abb635 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Tue, 22 Dec 2015 11:12:14 -0800 Subject: [PATCH 0285/1113] Revert "Fix make tests-py3 on devel. Fix for https://github.com/ansible/ansible/issues/13638." This reverts commit e70061334aa99bee466295980f4cd4146096dc29. 
Going to do this in the connection plugins --- test/units/plugins/action/test_action.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/test/units/plugins/action/test_action.py b/test/units/plugins/action/test_action.py index dcd04375959..0e47b6a5381 100644 --- a/test/units/plugins/action/test_action.py +++ b/test/units/plugins/action/test_action.py @@ -42,14 +42,14 @@ class TestActionBase(unittest.TestCase): play_context.become = True play_context.become_user = play_context.remote_user = 'root' - play_context.make_become_cmd = Mock(return_value=b'CMD') + play_context.make_become_cmd = Mock(return_value='CMD') - action_base._low_level_execute_command(b'ECHO', sudoable=True) + action_base._low_level_execute_command('ECHO', sudoable=True) play_context.make_become_cmd.assert_not_called() play_context.remote_user = 'apo' - action_base._low_level_execute_command(b'ECHO', sudoable=True) - play_context.make_become_cmd.assert_called_once_with(b'ECHO', executable=None) + action_base._low_level_execute_command('ECHO', sudoable=True) + play_context.make_become_cmd.assert_called_once_with('ECHO', executable=None) play_context.make_become_cmd.reset_mock() @@ -57,7 +57,7 @@ class TestActionBase(unittest.TestCase): C.BECOME_ALLOW_SAME_USER = True try: play_context.remote_user = 'root' - action_base._low_level_execute_command(b'ECHO SAME', sudoable=True) - play_context.make_become_cmd.assert_called_once_with(b'ECHO SAME', executable=None) + action_base._low_level_execute_command('ECHO SAME', sudoable=True) + play_context.make_become_cmd.assert_called_once_with('ECHO SAME', executable=None) finally: C.BECOME_ALLOW_SAME_USER = become_allow_same_user From 8d57ffd16bd1025f7b04127fec760c13aca6d6dd Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Tue, 22 Dec 2015 11:12:41 -0800 Subject: [PATCH 0286/1113] Revert "Transform the command we pass to subprocess into a byte string in _low_level-exec_command" This reverts commit 0c013f592a31c06baac7aadf27d23598f6abe931. Going to do this in the connection plugin --- lib/ansible/plugins/action/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py index e54898b6db3..3f4fff588e9 100644 --- a/lib/ansible/plugins/action/__init__.py +++ b/lib/ansible/plugins/action/__init__.py @@ -488,8 +488,7 @@ class ActionBase(with_metaclass(ABCMeta, object)): verbatim, then this won't work. May have to use some sort of replacement strategy (python3 could use surrogateescape) ''' - # We may need to revisit this later. - cmd = to_bytes(cmd, errors='strict') + if executable is not None: cmd = executable + ' -c ' + cmd From 9e32099b5e0535c2daf656e9d619e9a2efe9d3b6 Mon Sep 17 00:00:00 2001 From: Bruno Almeida do Lago Date: Tue, 5 Jan 2016 16:48:49 +1300 Subject: [PATCH 0287/1113] Added OpenStack dynamic inventory example Added an example illustrating how to use the OpenStack dynamic inventory script to the "Dynamic Inventory" section. --- docsite/rst/intro_dynamic_inventory.rst | 71 +++++++++++++++++++++++++ 1 file changed, 71 insertions(+) diff --git a/docsite/rst/intro_dynamic_inventory.rst b/docsite/rst/intro_dynamic_inventory.rst index 5f491ebc2ef..85feaa143bd 100644 --- a/docsite/rst/intro_dynamic_inventory.rst +++ b/docsite/rst/intro_dynamic_inventory.rst @@ -206,6 +206,77 @@ explicitly clear the cache, you can run the ec2.py script with the ``--refresh-c # ./ec2.py --refresh-cache +.. 
_openstack_example: + +Example: OpenStack External Inventory Script +```````````````````````````````````````````` + +If you use an OpenStack based cloud, instead of manually maintaining your own inventory file, you can use the openstack.py dynamic inventory to pull information about your compute instances directly from OpenStack. + +You can download the latest version of the OpenStack inventory script at: https://raw.githubusercontent.com/ansible/ansible/devel/contrib/inventory/openstack.py + +You can use the inventory script explicitly (by passing the `-i openstack.py` argument to Ansible) or implicitly (by placing the script at `/etc/ansible/hosts`). + +Explicit use of inventory script +++++++++++++++++++++++++++++++++ + +Download the latest version of the OpenStack dynamic inventory script and make it executable:: + + wget https://raw.githubusercontent.com/ansible/ansible/devel/contrib/inventory/openstack.py + chmod +x openstack.py + +Source an OpenStack RC file:: + + source openstack.rc + +.. note:: + + An OpenStack RC file contains the environment variables required by the client tools to establish a connection with the cloud provider, such as the authentication URL, user name, password and region name. For more information on how to download, create or source an OpenStack RC file, please refer to http://docs.openstack.org/cli-reference/content/cli_openrc.html. + +You can confirm the file has been successfully sourced by running a simple command, such as `nova list` and ensuring it return no errors. + +.. note:: + + The OpenStack command line clients are required to run the `nova list` command. For more information on how to install them, please refer to http://docs.openstack.org/cli-reference/content/install_clients.html. + +You can test the OpenStack dynamic inventory script manually to confirm it is working as expected:: + + ./openstack.py --list + +After a few moments you should see some JSON output with information about your compute instances. + +Once you confirm the dynamic inventory script is working as expected, you can tell Ansible to use the `openstack.py` script as an inventory file, as illustrated below:: + +ansible -i openstack.py all -m ping + +Implicit use of inventory script +++++++++++++++++++++++++++++++++ + +Download the latest version of the OpenStack dynamic inventory script, make it executable and copy it to `/etc/ansible/hosts`:: + + wget https://raw.githubusercontent.com/ansible/ansible/devel/contrib/inventory/openstack.py + chmod +x openstack.py + sudo cp openstack.py /etc/ansible/hosts + +Download the sample configuration file, modify it to suit your needs and copy it to /etc/ansible/openstack.yml + + wget https://raw.githubusercontent.com/ansible/ansible/devel/contrib/inventory/openstack.yml + vi openstack.yml + sudo cp openstack.yml /etc/ansible/ + +You can test the OpenStack dynamic inventory script manually to confirm it is working as expected:: + + /etc/ansible/hosts --list + +After a few moments you should see some JSON output with information about your compute instances. + +Refresh the cache ++++++++++++++++++ + +Note that the OpenStack dynamic inventory script will cache results to avoid repeated API calls. To explicitly clear the cache, you can run the openstack.py (or hosts) script with the --refresh parameter: + + ./openstack.py --refresh + .. 
_other_inventory_scripts: Other inventory scripts From c0a8cd950b909983cdc763f80495595d68597089 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Mon, 4 Jan 2016 19:23:12 -0800 Subject: [PATCH 0288/1113] Fix problems with non-ascii values passed as part of the command to connection plugins @drybjed discovered this with non-ascii environment variables and command line arguments to script and raw module. --- lib/ansible/plugins/connection/__init__.py | 1 + lib/ansible/plugins/connection/chroot.py | 2 + lib/ansible/plugins/connection/docker.py | 7 ++- lib/ansible/plugins/connection/jail.py | 6 ++- lib/ansible/plugins/connection/libvirt_lxc.py | 6 ++- lib/ansible/plugins/connection/local.py | 11 ++++- lib/ansible/plugins/connection/ssh.py | 17 +++++-- lib/ansible/plugins/connection/zone.py | 8 ++-- test/integration/unicode-test-script | 7 +++ test/integration/unicode.yml | 45 +++++++++++++++++++ 10 files changed, 97 insertions(+), 13 deletions(-) create mode 100755 test/integration/unicode-test-script diff --git a/lib/ansible/plugins/connection/__init__.py b/lib/ansible/plugins/connection/__init__.py index 06616bac4ca..ff00bc02380 100644 --- a/lib/ansible/plugins/connection/__init__.py +++ b/lib/ansible/plugins/connection/__init__.py @@ -91,6 +91,7 @@ class ConnectionBase(with_metaclass(ABCMeta, object)): @property def connected(self): + '''Read-only property holding whether the connection to the remote host is active or closed.''' return self._connected def _become_method_supported(self): diff --git a/lib/ansible/plugins/connection/chroot.py b/lib/ansible/plugins/connection/chroot.py index c86ea1fc355..ba41ffb5d88 100644 --- a/lib/ansible/plugins/connection/chroot.py +++ b/lib/ansible/plugins/connection/chroot.py @@ -30,6 +30,7 @@ from ansible import constants as C from ansible.errors import AnsibleError from ansible.plugins.connection import ConnectionBase from ansible.module_utils.basic import is_executable +from ansible.utils.unicode import to_bytes try: from __main__ import display @@ -90,6 +91,7 @@ class Connection(ConnectionBase): local_cmd = [self.chroot_cmd, self.chroot, executable, '-c', cmd] display.vvv("EXEC %s" % (local_cmd), host=self.chroot) + local_cmd = map(to_bytes, local_cmd) p = subprocess.Popen(local_cmd, shell=False, stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE) diff --git a/lib/ansible/plugins/connection/docker.py b/lib/ansible/plugins/connection/docker.py index 4e08f56a095..ce556a1431b 100644 --- a/lib/ansible/plugins/connection/docker.py +++ b/lib/ansible/plugins/connection/docker.py @@ -36,6 +36,7 @@ from distutils.version import LooseVersion import ansible.constants as C from ansible.errors import AnsibleError, AnsibleFileNotFound from ansible.plugins.connection import ConnectionBase +from ansible.utils.unicode import to_bytes try: from __main__ import display @@ -125,7 +126,8 @@ class Connection(ConnectionBase): # -i is needed to keep stdin open which allows pipelining to work local_cmd = [self.docker_cmd, "exec", '-i', self._play_context.remote_addr, executable, '-c', cmd] - display.vvv("EXEC %s" % (local_cmd), host=self._play_context.remote_addr) + display.vvv("EXEC %s" % (local_cmd,), host=self._play_context.remote_addr) + local_cmd = map(to_bytes, local_cmd) p = subprocess.Popen(local_cmd, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) @@ -159,6 +161,7 @@ class Connection(ConnectionBase): if self.can_copy_bothways: # only docker >= 1.8.1 can do this natively args = [ self.docker_cmd, "cp", in_path, "%s:%s" % 
(self._play_context.remote_addr, out_path) ] + args = map(to_bytes, args) p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = p.communicate() if p.returncode != 0: @@ -170,6 +173,7 @@ class Connection(ConnectionBase): executable = C.DEFAULT_EXECUTABLE.split()[0] if C.DEFAULT_EXECUTABLE else '/bin/sh' args = [self.docker_cmd, "exec", "-i", self._play_context.remote_addr, executable, "-c", "dd of={0} bs={1}".format(out_path, BUFSIZE)] + args = map(to_bytes, args) with open(in_path, 'rb') as in_file: try: p = subprocess.Popen(args, stdin=in_file, @@ -192,6 +196,7 @@ class Connection(ConnectionBase): out_dir = os.path.dirname(out_path) args = [self.docker_cmd, "cp", "%s:%s" % (self._play_context.remote_addr, in_path), out_dir] + args = map(to_bytes, args) p = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) diff --git a/lib/ansible/plugins/connection/jail.py b/lib/ansible/plugins/connection/jail.py index e665692543a..8f88b6ad28f 100644 --- a/lib/ansible/plugins/connection/jail.py +++ b/lib/ansible/plugins/connection/jail.py @@ -30,6 +30,7 @@ import traceback from ansible import constants as C from ansible.errors import AnsibleError from ansible.plugins.connection import ConnectionBase +from ansible.utils.unicode import to_bytes try: from __main__ import display @@ -83,7 +84,7 @@ class Connection(ConnectionBase): return stdout.split() def get_jail_path(self): - p = subprocess.Popen([self.jls_cmd, '-j', self.jail, '-q', 'path'], + p = subprocess.Popen([self.jls_cmd, '-j', to_bytes(self.jail), '-q', 'path'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) @@ -109,7 +110,8 @@ class Connection(ConnectionBase): executable = C.DEFAULT_EXECUTABLE.split()[0] if C.DEFAULT_EXECUTABLE else '/bin/sh' local_cmd = [self.jexec_cmd, self.jail, executable, '-c', cmd] - display.vvv("EXEC %s" % (local_cmd), host=self.jail) + display.vvv("EXEC %s" % (local_cmd,), host=self.jail) + local_cmd = map(to_bytes, local_cmd) p = subprocess.Popen(local_cmd, shell=False, stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE) diff --git a/lib/ansible/plugins/connection/libvirt_lxc.py b/lib/ansible/plugins/connection/libvirt_lxc.py index dc82d984040..3bfff8b1c35 100644 --- a/lib/ansible/plugins/connection/libvirt_lxc.py +++ b/lib/ansible/plugins/connection/libvirt_lxc.py @@ -30,6 +30,7 @@ import traceback from ansible import constants as C from ansible.errors import AnsibleError from ansible.plugins.connection import ConnectionBase +from ansible.utils.unicode import to_bytes try: from __main__ import display @@ -65,7 +66,7 @@ class Connection(ConnectionBase): return cmd def _check_domain(self, domain): - p = subprocess.Popen([self.virsh, '-q', '-c', 'lxc:///', 'dominfo', domain], + p = subprocess.Popen([self.virsh, '-q', '-c', 'lxc:///', 'dominfo', to_bytes(domain)], stdout=subprocess.PIPE, stderr=subprocess.PIPE) p.communicate() if p.returncode: @@ -89,7 +90,8 @@ class Connection(ConnectionBase): executable = C.DEFAULT_EXECUTABLE.split()[0] if C.DEFAULT_EXECUTABLE else '/bin/sh' local_cmd = [self.virsh, '-q', '-c', 'lxc:///', 'lxc-enter-namespace', self.lxc, '--', executable , '-c', cmd] - display.vvv("EXEC %s" % (local_cmd), host=self.lxc) + display.vvv("EXEC %s" % (local_cmd,), host=self.lxc) + local_cmd = map(to_bytes, local_cmd) p = subprocess.Popen(local_cmd, shell=False, stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE) diff --git a/lib/ansible/plugins/connection/local.py 
b/lib/ansible/plugins/connection/local.py index e69281d0f3b..29b1e9a5ca2 100644 --- a/lib/ansible/plugins/connection/local.py +++ b/lib/ansible/plugins/connection/local.py @@ -25,10 +25,13 @@ import select import fcntl import getpass +from ansible.compat.six import text_type, binary_type + import ansible.constants as C from ansible.errors import AnsibleError, AnsibleFileNotFound from ansible.plugins.connection import ConnectionBase +from ansible.utils.unicode import to_bytes try: from __main__ import display @@ -69,9 +72,15 @@ class Connection(ConnectionBase): raise AnsibleError("Internal Error: this module does not support optimized module pipelining") executable = C.DEFAULT_EXECUTABLE.split()[0] if C.DEFAULT_EXECUTABLE else None - display.vvv("{0} EXEC {1}".format(self._play_context.remote_addr, cmd)) + display.vvv(u"{0} EXEC {1}".format(self._play_context.remote_addr, cmd)) # FIXME: cwd= needs to be set to the basedir of the playbook display.debug("opening command with Popen()") + + if isinstance(cmd, (text_type, binary_type)): + cmd = to_bytes(cmd) + else: + cmd = map(to_bytes, cmd) + p = subprocess.Popen( cmd, shell=isinstance(cmd, basestring), diff --git a/lib/ansible/plugins/connection/ssh.py b/lib/ansible/plugins/connection/ssh.py index a2abcf20aee..074f6aaa8ae 100644 --- a/lib/ansible/plugins/connection/ssh.py +++ b/lib/ansible/plugins/connection/ssh.py @@ -33,6 +33,7 @@ from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNo from ansible.plugins.connection import ConnectionBase from ansible.utils.path import unfrackpath, makedirs_safe from ansible.utils.unicode import to_bytes, to_unicode +from ansible.compat.six import text_type, binary_type try: from __main__ import display @@ -320,7 +321,7 @@ class Connection(ConnectionBase): ''' display_cmd = map(pipes.quote, cmd) - display.vvv('SSH: EXEC {0}'.format(' '.join(display_cmd)), host=self.host) + display.vvv(u'SSH: EXEC {0}'.format(u' '.join(display_cmd)), host=self.host) # Start the given command. If we don't need to pipeline data, we can try # to use a pseudo-tty (ssh will have been invoked with -tt). If we are @@ -328,6 +329,12 @@ class Connection(ConnectionBase): # old pipes. p = None + + if isinstance(cmd, (text_type, binary_type)): + cmd = to_bytes(cmd) + else: + cmd = map(to_bytes, cmd) + if not in_data: try: # Make sure stdin is a proper pty to avoid tcgetattr errors @@ -365,7 +372,7 @@ class Connection(ConnectionBase): # only when using ssh. Otherwise we can send initial data straightaway. state = states.index('ready_to_send') - if 'ssh' in cmd: + if b'ssh' in cmd: if self._play_context.prompt: # We're requesting escalation with a password, so we have to # wait for a password prompt. @@ -538,7 +545,7 @@ class Connection(ConnectionBase): stdin.close() if C.HOST_KEY_CHECKING: - if cmd[0] == "sshpass" and p.returncode == 6: + if cmd[0] == b"sshpass" and p.returncode == 6: raise AnsibleError('Using a SSH password instead of a key is not possible because Host Key checking is enabled and sshpass does not support this. 
Please add this host\'s fingerprint to your known_hosts file to manage this host.') controlpersisterror = 'Bad configuration option: ControlPersist' in stderr or 'unknown configuration option: ControlPersist' in stderr @@ -600,7 +607,7 @@ class Connection(ConnectionBase): raise AnsibleConnectionFailure("Failed to connect to the host via ssh.") except (AnsibleConnectionFailure, Exception) as e: if attempt == remaining_tries - 1: - raise e + raise else: pause = 2 ** attempt - 1 if pause > 30: @@ -674,6 +681,8 @@ class Connection(ConnectionBase): # temporarily disabled as we are forced to currently close connections after every task because of winrm # if self._connected and self._persistent: # cmd = self._build_command('ssh', '-O', 'stop', self.host) + # + # cmd = map(to_bytes, cmd) # p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) # stdout, stderr = p.communicate() diff --git a/lib/ansible/plugins/connection/zone.py b/lib/ansible/plugins/connection/zone.py index 75d7db545d6..b65c80b73fb 100644 --- a/lib/ansible/plugins/connection/zone.py +++ b/lib/ansible/plugins/connection/zone.py @@ -31,6 +31,7 @@ import traceback from ansible import constants as C from ansible.errors import AnsibleError from ansible.plugins.connection import ConnectionBase +from ansible.utils import to_bytes try: from __main__ import display @@ -56,8 +57,8 @@ class Connection(ConnectionBase): if os.geteuid() != 0: raise AnsibleError("zone connection requires running as root") - self.zoneadm_cmd = self._search_executable('zoneadm') - self.zlogin_cmd = self._search_executable('zlogin') + self.zoneadm_cmd = to_bytes(self._search_executable('zoneadm')) + self.zlogin_cmd = to_bytes(self._search_executable('zlogin')) if self.zone not in self.list_zones(): raise AnsibleError("incorrect zone name %s" % self.zone) @@ -86,7 +87,7 @@ class Connection(ConnectionBase): def get_zone_path(self): #solaris10vm# zoneadm -z cswbuild list -p #-:cswbuild:installed:/zones/cswbuild:479f3c4b-d0c6-e97b-cd04-fd58f2c0238e:native:shared - process = subprocess.Popen([self.zoneadm_cmd, '-z', self.zone, 'list', '-p'], + process = subprocess.Popen([self.zoneadm_cmd, '-z', to_bytes(self.zone), 'list', '-p'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) @@ -113,6 +114,7 @@ class Connection(ConnectionBase): # this through /bin/sh -c here. Instead it goes through the shell # that zlogin selects. 
local_cmd = [self.zlogin_cmd, self.zone, cmd] + local_cmd = map(to_bytes, local_cmd) display.vvv("EXEC %s" % (local_cmd), host=self.zone) p = subprocess.Popen(local_cmd, shell=False, stdin=stdin, diff --git a/test/integration/unicode-test-script b/test/integration/unicode-test-script new file mode 100755 index 00000000000..340f2a9f5b2 --- /dev/null +++ b/test/integration/unicode-test-script @@ -0,0 +1,7 @@ +#!/bin/sh + +echo "Non-ascii arguments:" +echo $@ + +echo "Non-ascii Env var:" +echo $option diff --git a/test/integration/unicode.yml b/test/integration/unicode.yml index 6e8e073a79d..f38bf8f5e86 100644 --- a/test/integration/unicode.yml +++ b/test/integration/unicode.yml @@ -49,6 +49,51 @@ that: - "'¯ ° ± ² ³ ´ µ ¶ · ¸ ¹ º » ¼ ½ ¾ ¿ À Á Â Ã Ä Å Æ Ç È É Ê Ë Ì Í Î Ï Ð Ñ Ò Ó Ô Õ Ö ×' in output.stdout_lines" + - name: Run raw with non-ascii options + raw: "/bin/echo Zażółć gęślą jaźń" + register: results + + - name: Check that raw output the right thing + assert: + that: + - "'Zażółć gęślą jaźń' in results.stdout_lines" + + - name: Run a script with non-ascii options and environment + script: unicode-test-script --option "Zażółć gęślą jaźń" + environment: + option: Zażółć + register: results + + - name: Check that script output includes the nonascii arguments and environment values + assert: + that: + - "'--option Zażółć gęślą jaźń' in results.stdout_lines" + - "'Zażółć' in results.stdout_lines" + + - name: Ping with non-ascii environment variable and option + ping: + data: "Zażółć gęślą jaźń" + environment: + option: Zażółć + register: results + + - name: Check that ping with non-ascii data was correct + assert: + that: + - "'Zażółć gęślą jaźń' == results.ping" + + - name: Command that echos a non-ascii env var + command: "echo $option" + environment: + option: Zażółć + register: results + + - name: Check that a non-ascii env var was passed to the command module + assert: + that: + - "'Zażółć' in results.stdout_lines" + + - name: 'A play for hosts in group: ĪīĬĭ' hosts: 'ĪīĬĭ' gather_facts: true From 6470f7de2cf4cfc37fa5fef66c7e37514b6139d3 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Tue, 5 Jan 2016 07:53:22 -0800 Subject: [PATCH 0289/1113] Update submodule refs --- lib/ansible/modules/core | 2 +- lib/ansible/modules/extras | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core index 002028748f0..33014c6db1c 160000 --- a/lib/ansible/modules/core +++ b/lib/ansible/modules/core @@ -1 +1 @@ -Subproject commit 002028748f080961ade801c30e194bfd4ba043ce +Subproject commit 33014c6db1ce757d0ffa065e6c9924ac4db1cacc diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras index f6a7b6dd1f7..82a4cf84be8 160000 --- a/lib/ansible/modules/extras +++ b/lib/ansible/modules/extras @@ -1 +1 @@ -Subproject commit f6a7b6dd1f7be93ba640c50bf26adeeabb5af46f +Subproject commit 82a4cf84be82244d0cf7d043c8cbb4f176f086db From 11ce08b9dde32c7e4b51a6fffc22f301c81181be Mon Sep 17 00:00:00 2001 From: Eric Feliksik Date: Tue, 5 Jan 2016 18:04:38 +0100 Subject: [PATCH 0290/1113] cleaner implementation and random chunk length. 
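The patch below drops the string-based generate_data() helper in favor of a cheaper scheme: draw one block of urandom data per pass, with a randomised length, and repeat it until the file size is covered. A standalone sketch of that pattern, using illustrative names and constants rather than the exact VaultEditor code:

    import os
    import random

    def overwrite_with_random(path, passes=3, max_chunk=2 * 1024 * 1024):
        file_len = os.path.getsize(path)
        if file_len == 0:
            return
        chunk_max = min(max_chunk, file_len)
        with open(path, "wb") as fh:
            for _ in range(passes):
                fh.seek(0)
                # one urandom() call per pass, repeated across the file, is far
                # cheaper than generating file_len random characters each time
                chunk = os.urandom(random.randint(max(1, chunk_max // 2), chunk_max))
                for _ in range(file_len // len(chunk)):
                    fh.write(chunk)
                fh.write(chunk[:file_len % len(chunk)])
                fh.flush()
                os.fsync(fh.fileno())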
--- lib/ansible/parsing/vault/__init__.py | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/lib/ansible/parsing/vault/__init__.py b/lib/ansible/parsing/vault/__init__.py index bcd038c8b8d..1d4eeef4653 100644 --- a/lib/ansible/parsing/vault/__init__.py +++ b/lib/ansible/parsing/vault/__init__.py @@ -22,6 +22,7 @@ import shlex import shutil import sys import tempfile +import random from io import BytesIO from subprocess import call from ansible.errors import AnsibleError @@ -235,20 +236,21 @@ class VaultEditor: """ file_len = os.path.getsize(tmp_path) + max_chunk_len = min(1024*1024*2, file_len) passes = 3 with open(tmp_path, "wb") as fh: for _ in range(passes): fh.seek(0, 0) - # get a random chunk of data - data = os.urandom(min(1024*1024*2, file_len)) - bytes_todo = file_len - while bytes_todo > 0: - chunk = data[:bytes_todo] - fh.write(chunk) - bytes_todo -= len(chunk) - - assert(fh.tell() == file_len) + # get a random chunk of data, each pass with other length + chunk_len = random.randint(max_chunk_len/2, max_chunk_len) + data = os.urandom(chunk_len) + + for _ in range(0, file_len // chunk_len): + fh.write(data) + fh.write(data[:file_len % chunk_len]) + + assert(fh.tell() == file_len) # FIXME remove this assert once we have unittests to check its accuracy os.fsync(fh) @@ -273,13 +275,12 @@ class VaultEditor: r = call(['shred', tmp_path]) except OSError as e: # shred is not available on this system, or some other error occured. - self._shred_file_custom(tmp_path) - r = 0 + r = 1 if r != 0: # we could not successfully execute unix shred; therefore, do custom shred. self._shred_file_custom(tmp_path) - + os.remove(tmp_path) def _edit_file_helper(self, filename, existing_data=None, force_save=False): From 9972c27a9bc1dd2c9051368e082e2b366a04acbe Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Mon, 4 Jan 2016 18:44:09 -0500 Subject: [PATCH 0291/1113] now handles 'non file diffs' this allows modules to pass back a 'diff' dict and it will still show using the file interface --- lib/ansible/plugins/callback/__init__.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/ansible/plugins/callback/__init__.py b/lib/ansible/plugins/callback/__init__.py index cc2a9ad0e75..faf04b1180f 100644 --- a/lib/ansible/plugins/callback/__init__.py +++ b/lib/ansible/plugins/callback/__init__.py @@ -116,6 +116,10 @@ class CallbackBase: if 'src_larger' in diff: ret.append("diff skipped: source file size is greater than %d\n" % diff['src_larger']) if 'before' in diff and 'after' in diff: + # format complex structures into 'files' + for x in ['before', 'after']: + if isinstance(diff[x], dict): + diff[x] = json.dumps(diff[x], sort_keys=True, indent=4) if 'before_header' in diff: before_header = "before: %s" % diff['before_header'] else: From f3c45adfb8670701d0b19e86787a5213bb5afb5f Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Mon, 4 Jan 2016 19:58:06 -0500 Subject: [PATCH 0292/1113] simplified diff handling in callback no need for the copy or other complexity --- lib/ansible/plugins/callback/default.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/lib/ansible/plugins/callback/default.py b/lib/ansible/plugins/callback/default.py index e515945bba5..276ac435f4b 100644 --- a/lib/ansible/plugins/callback/default.py +++ b/lib/ansible/plugins/callback/default.py @@ -137,11 +137,8 @@ class CallbackModule(CallbackBase): def v2_on_file_diff(self, result): if result._task.loop and 'results' in result._result: for res in result._result['results']: - 
newres = self._copy_result(result) - res['item'] = self._get_item(res) - newres._result = res - - self.v2_on_file_diff(newres) + if 'diff' in res: + self._display.display(self._get_diff(res['diff'])) elif 'diff' in result._result and result._result['diff']: self._display.display(self._get_diff(result._result['diff'])) From a65543bbafbd328e7848a99d2a570f71c43a53a0 Mon Sep 17 00:00:00 2001 From: Charles Paul Date: Tue, 5 Jan 2016 14:52:06 -0600 Subject: [PATCH 0293/1113] adding password no_log and cleaning up argument spec --- lib/ansible/module_utils/vca.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/module_utils/vca.py b/lib/ansible/module_utils/vca.py index ef89d545569..9737cca8b47 100644 --- a/lib/ansible/module_utils/vca.py +++ b/lib/ansible/module_utils/vca.py @@ -35,8 +35,8 @@ class VcaError(Exception): def vca_argument_spec(): return dict( - username=dict(), - password=dict(), + username=dict(type='str', aliases=['user'], required=True), + password=dict(type='str', aliases=['pass','passwd'], required=True, no_log=True), org=dict(), service_id=dict(), instance_id=dict(), From dc47c25e589f1c2b1f44867076624f0e0564b7c6 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Tue, 5 Jan 2016 22:01:01 -0500 Subject: [PATCH 0294/1113] Minor tweak to ensure diff is not empty in callback for file diffs --- lib/ansible/plugins/callback/default.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/plugins/callback/default.py b/lib/ansible/plugins/callback/default.py index 6ca728e65f8..dfad6579343 100644 --- a/lib/ansible/plugins/callback/default.py +++ b/lib/ansible/plugins/callback/default.py @@ -137,7 +137,7 @@ class CallbackModule(CallbackBase): def v2_on_file_diff(self, result): if result._task.loop and 'results' in result._result: for res in result._result['results']: - if 'diff' in res: + if 'diff' in res and res['diff']: self._display.display(self._get_diff(res['diff'])) elif 'diff' in result._result and result._result['diff']: self._display.display(self._get_diff(result._result['diff'])) From 7c8374e0f8e153368bb6a22caf7b7ada07f8d797 Mon Sep 17 00:00:00 2001 From: Abhijit Menon-Sen Date: Wed, 6 Jan 2016 20:44:19 +0530 Subject: [PATCH 0295/1113] Strip string terms before templating The earlier code did call terms.strip(), but ignored the return value instead of passing that in to templar.template(). Clearly an oversight. 
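The mistake is easy to miss because the original line looks plausible: Python strings are immutable, so str.strip() returns a new string, and calling it without using the return value is a no-op. A minimal illustration (the variable name mirrors listify_lookup_plugin_terms; the template string is just an example):

    terms = "  {{ my_list }}  "

    stripped = terms.strip()   # what the corrected code now passes to templar.template()
    terms.strip()              # the old pattern: return value discarded, terms unchanged

    print(repr(terms))      # '  {{ my_list }}  '
    print(repr(stripped))   # '{{ my_list }}'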
--- lib/ansible/utils/listify.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/ansible/utils/listify.py b/lib/ansible/utils/listify.py index 7fe83a8fa0c..d834737ab58 100644 --- a/lib/ansible/utils/listify.py +++ b/lib/ansible/utils/listify.py @@ -31,9 +31,8 @@ __all__ = ['listify_lookup_plugin_terms'] def listify_lookup_plugin_terms(terms, templar, loader, fail_on_undefined=False, convert_bare=True): if isinstance(terms, string_types): - stripped = terms.strip() # TODO: warn/deprecation on bare vars in with_ so we can eventually remove fail on undefined override - terms = templar.template(terms, convert_bare=convert_bare, fail_on_undefined=fail_on_undefined) + terms = templar.template(terms.strip(), convert_bare=convert_bare, fail_on_undefined=fail_on_undefined) else: terms = templar.template(terms, fail_on_undefined=fail_on_undefined) From 11b55be5bbb90b2bc917b2637d6fcdbe1a15092d Mon Sep 17 00:00:00 2001 From: muffl0n Date: Thu, 20 Aug 2015 10:31:48 +0200 Subject: [PATCH 0296/1113] Show version without supplying a dummy action fixes #12004 parsing x2 does not seem to break anything --- lib/ansible/cli/galaxy.py | 7 +++++-- lib/ansible/cli/vault.py | 3 +++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py index 476a7d0f897..a022d17859c 100644 --- a/lib/ansible/cli/galaxy.py +++ b/lib/ansible/cli/galaxy.py @@ -50,7 +50,7 @@ class GalaxyCLI(CLI): SKIP_INFO_KEYS = ("name", "description", "readme_html", "related", "summary_fields", "average_aw_composite", "average_aw_score", "url" ) VALID_ACTIONS = ("delete", "import", "info", "init", "install", "list", "login", "remove", "search", "setup") - + def __init__(self, args): self.api = None self.galaxy = None @@ -64,6 +64,9 @@ class GalaxyCLI(CLI): epilog = "\nSee '%s --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0]) ) + # Workaround for #12004: show version without supplying a dummy action + self.parser.parse_args() + self.set_action() # options specific to actions @@ -141,7 +144,7 @@ class GalaxyCLI(CLI): return True def run(self): - + super(GalaxyCLI, self).run() # if not offline, get connect to galaxy api diff --git a/lib/ansible/cli/vault.py b/lib/ansible/cli/vault.py index 9908f17e578..50a6fdebdc8 100644 --- a/lib/ansible/cli/vault.py +++ b/lib/ansible/cli/vault.py @@ -53,6 +53,9 @@ class VaultCLI(CLI): epilog = "\nSee '%s --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0]) ) + # Workaround for #12004: show version without supplying a dummy action + self.parser.parse_args() + self.set_action() # options specific to self.actions From ab2f47327a82148441140c9b98a02a6e28877153 Mon Sep 17 00:00:00 2001 From: Sandra Wills Date: Wed, 6 Jan 2016 13:59:25 -0500 Subject: [PATCH 0297/1113] removed the "wy-side-nav-search" element this is so we can use the new swiftype search and it's search input --- docsite/_themes/srtd/layout.html | 5 ----- 1 file changed, 5 deletions(-) diff --git a/docsite/_themes/srtd/layout.html b/docsite/_themes/srtd/layout.html index 41b6b75c1d2..a10b7656aab 100644 --- a/docsite/_themes/srtd/layout.html +++ b/docsite/_themes/srtd/layout.html @@ -150,11 +150,6 @@

- -