Merge remote-tracking branch 'upstream/devel' into docker-read-only-container

reviewable/pr18780/r1
Johan Haals 9 years ago
commit 8ec6538ad4

@@ -298,7 +298,19 @@ options:
     required: false
     default: null
     version_added: "2.0"
+  cap_add:
+    description:
+      - Add capabilities for the container. Requires docker-py >= 0.5.0.
+    required: false
+    default: false
+    version_added: "2.0"
+  cap_drop:
+    description:
+      - Drop capabilities for the container. Requires docker-py >= 0.5.0.
+    required: false
+    default: false
+    aliases: []
+    version_added: "2.0"
 author:
     - "Cove Schneider (@cove)"
     - "Joshua Conner (@joshuaconner)"
@@ -557,6 +569,8 @@ class DockerManager(object):
         'log_driver': ((1, 2, 0), '1.18'),
         'host_config': ((0, 7, 0), '1.15'),
         'cpu_set': ((0, 6, 0), '1.14'),
+        'cap_add': ((0, 5, 0), '1.14'),
+        'cap_drop': ((0, 5, 0), '1.14'),
         # Clientside only
         'insecure_registry': ((0, 5, 0), '0.0')
         }
@@ -1328,7 +1342,8 @@ class DockerManager(object):
         optionals = {}
         for optional_param in ('dns', 'volumes_from', 'restart_policy',
-                'restart_policy_retry', 'pid', 'extra_hosts', 'log_driver'):
+                'restart_policy_retry', 'pid', 'extra_hosts', 'log_driver',
+                'cap_add', 'cap_drop'):
             optionals[optional_param] = self.module.params.get(optional_param)
         if optionals['dns'] is not None:
@@ -1363,6 +1378,14 @@ class DockerManager(object):
             log_config.type = optionals['log_driver']
             params['log_config'] = log_config
+        if optionals['cap_add'] is not None:
+            self.ensure_capability('cap_add')
+            params['cap_add'] = optionals['cap_add']
+        if optionals['cap_drop'] is not None:
+            self.ensure_capability('cap_drop')
+            params['cap_drop'] = optionals['cap_drop']
         return docker.utils.create_host_config(**params)
     def create_containers(self, count=1):
@@ -1616,6 +1639,8 @@ def main():
             insecure_registry = dict(default=False, type='bool'),
             log_driver      = dict(default=None, choices=['json-file', 'none', 'syslog']),
             cpu_set         = dict(default=None),
+            cap_add         = dict(default=None, type='list'),
+            cap_drop        = dict(default=None, type='list'),
             read_only       = dict(default=False, type='bool'),
         ),
         required_together = (
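For reference, the new parameters take plain lists in a play. A minimal sketch of a task exercising them (container name, image and capability names are illustrative; docker-py >= 0.5.0 assumed, as documented above):

- name: run a container with a trimmed capability set
  docker:
    name: web
    image: nginx
    state: started
    cap_drop:
      - ALL
    cap_add:
      - NET_BIND_SERVICE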

@@ -75,7 +75,7 @@ options:
     aliases: []
   state:
     description:
-      - desired state of the persistent disk
+      - desired state of the network or firewall
     required: false
     default: "present"
     choices: ["active", "present", "absent", "deleted"]
@@ -264,7 +264,7 @@ def main():
             if fw:
                 gce.ex_destroy_firewall(fw)
                 changed = True
-        if name:
+        elif name:
             json_output['name'] = name
             network = None
             try:

@@ -70,14 +70,6 @@ options:
     choices: [ "yes", "no" ]
     default: "yes"
     aliases: [ "thirsty" ]
-  validate:
-    description:
-      - The validation command to run before copying into place. The path to the file to
-        validate is passed in via '%s' which must be present as in the visudo example below.
-        The command is passed securely so shell features like expansion and pipes won't work.
-    required: false
-    default: ""
-    version_added: "1.2"
   directory_mode:
     description:
       - When doing a recursive copy set the mode for the directories. If this is not set we will use the system
@@ -86,6 +78,7 @@ options:
     required: false
     version_added: "1.5"
 extends_documentation_fragment: files
+extends_documentation_fragment: validate
 author:
     - "Ansible Core Team"
     - "Michael DeHaan"

@@ -31,6 +31,7 @@ author:
     - "Daniel Hokka Zakrissoni (@dhozac)"
     - "Ahti Kitsik (@ahtik)"
 extends_documentation_fragment: files
+extends_documentation_fragment: validate
 short_description: Ensure a particular line is in a file, or replace an
                    existing line using a back-referenced regular expression.
 description:
@@ -116,16 +117,6 @@ options:
     description:
       - Create a backup file including the timestamp information so you can
        get the original file back if you somehow clobbered it incorrectly.
-  validate:
-    required: false
-    description:
-      - validation to run before copying into place.
-        Use %s in the command to indicate the current file to validate.
-        The command is passed securely so shell features like
-        expansion and pipes won't work.
-    required: false
-    default: None
-    version_added: "1.4"
   others:
     description:
       - All arguments accepted by the M(file) module also work here.

@@ -27,6 +27,7 @@ DOCUMENTATION = """
 module: replace
 author: "Evan Kaufman (@EvanK)"
 extends_documentation_fragment: files
+extends_documentation_fragment: validate
 short_description: Replace all instances of a particular string in a
                    file using a back-referenced regular expression.
 description:
@@ -61,12 +62,6 @@ options:
     description:
       - Create a backup file including the timestamp information so you can
        get the original file back if you somehow clobbered it incorrectly.
-  validate:
-    required: false
-    description:
-      - validation to run before copying into place
-    required: false
-    default: None
   others:
     description:
       - All arguments accepted by the M(file) module also work here.

@@ -38,15 +38,6 @@ options:
     required: false
     choices: [ "yes", "no" ]
     default: "no"
-  validate:
-    description:
-      - The validation command to run before copying into place.
-      - The path to the file to validate is passed in via '%s' which must be present as in the visudo example below.
-      - validation to run before copying into place. The command is passed
-        securely so shell features like expansion and pipes won't work.
-    required: false
-    default: ""
-    version_added: "1.2"
   force:
     description:
       - the default is C(yes), which will replace the remote file when contents
@@ -62,6 +53,7 @@ author:
     - Ansible Core Team
     - Michael DeHaan
 extends_documentation_fragment: files
+extends_documentation_fragment: validate
 '''
 EXAMPLES = '''

@@ -356,7 +356,8 @@ def main():
             rc, out_pip, err_pip = module.run_command(cmd, path_prefix=path_prefix, cwd=this_dir)
             out += out_pip
             err += err_pip
-            if rc == 1 and state == 'absent' and 'not installed' in out_pip:
+            if rc == 1 and state == 'absent' and \
+               ('not installed' in out_pip or 'not installed' in err_pip):
                 pass  # rc is 1 when attempting to uninstall non-installed package
             elif rc != 0:
                 _fail(module, cmd, out, err)
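The net effect is that removing a package that was never installed no longer fails when pip prints 'not installed' on stderr rather than stdout; a task like this (package name illustrative) stays idempotent:

- pip: name=six state=absent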

@@ -65,6 +65,13 @@ options:
     required: false
     default: "no"
     choices: [ "yes", "no" ]
+  update:
+    required: false
+    default: "yes"
+    choices: [ "yes", "no" ]
+    version_added: "2.0"
+    description:
+      - If C(no), do not retrieve new revisions from the origin repository
   executable:
     required: false
     default: null
@@ -210,6 +217,7 @@ def main():
             revision = dict(default=None, aliases=['version']),
             force = dict(default='no', type='bool'),
             purge = dict(default='no', type='bool'),
+            update = dict(default='yes', type='bool'),
             executable = dict(default=None),
         ),
     )
@@ -218,6 +226,7 @@ def main():
     revision = module.params['revision']
     force = module.params['force']
     purge = module.params['purge']
+    update = module.params['update']
     hg_path = module.params['executable'] or module.get_bin_path('hg', True)
     hgrc = os.path.join(dest, '.hg/hgrc')
@@ -234,6 +243,9 @@ def main():
         (rc, out, err) = hg.clone()
         if rc != 0:
             module.fail_json(msg=err)
+    elif not update:
+        # Just return having found a repo already in the dest path
+        before = hg.get_revision()
     elif hg.at_revision:
         # no update needed, don't pull
         before = hg.get_revision()
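With update=no the module only clones when the checkout is missing and otherwise leaves the existing working copy untouched. A sketch of how the new option might be used, with repo URL and destination purely illustrative:

- name: ensure the repo exists but never pull new changesets
  hg: repo=https://bitbucket.org/example/project dest=/srv/project update=no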

@@ -90,7 +90,7 @@ notes:
   - I(virtualenv) (U(http://www.virtualenv.org)) must be installed on the remote host if the virtualenv parameter is specified.
   - This module will create a virtualenv if the virtualenv parameter is specified and a virtualenv does not already exist at the given location.
   - This module assumes English error messages for the 'createcachetable' command to detect table existence, unfortunately.
-  - To be able to use the migrate command, you must have south installed and added as an app in your settings
+  - To be able to use the migrate command with django versions < 1.7, you must have south installed and added as an app in your settings
   - To be able to use the collectstatic command, you must have enabled staticfiles in your settings
 requirements: [ "virtualenv", "django" ]
 author: "Scott Anderson (@tastychutney)"
@@ -163,7 +163,7 @@ def syncdb_filter_output(line):
     return ("Creating table " in line) or ("Installed" in line and "Installed 0 object" not in line)
 def migrate_filter_output(line):
-    return ("Migrating forwards " in line) or ("Installed" in line and "Installed 0 object" not in line)
+    return ("Migrating forwards " in line) or ("Installed" in line and "Installed 0 object" not in line) or ("Applying" in line)
 def collectstatic_filter_output(line):
     return "0 static files" not in line

@@ -19,6 +19,11 @@
 $params = Parse-Args $args;
+function Date_To_Timestamp($start_date, $end_date)
+{
+    Write-Output (New-TimeSpan -Start $start_date -End $end_date).TotalSeconds
+}
 $path = Get-Attr $params "path" $FALSE;
 If ($path -eq $FALSE)
 {
@@ -36,6 +41,7 @@ If (Test-Path $path)
 {
     Set-Attr $result.stat "exists" $TRUE;
     $info = Get-Item $path;
+    $epoch_date = Get-Date -Date "01/01/1970"
     If ($info.Directory) # Only files have the .Directory attribute.
     {
         Set-Attr $result.stat "isdir" $FALSE;
@@ -45,6 +51,12 @@ If (Test-Path $path)
     {
         Set-Attr $result.stat "isdir" $TRUE;
     }
+    Set-Attr $result.stat "extension" $info.Extension;
+    Set-Attr $result.stat "attributes" $info.Attributes.ToString();
+    Set-Attr $result.stat "owner" $info.GetAccessControl().Owner;
+    Set-Attr $result.stat "creationtime" (Date_To_Timestamp $epoch_date $info.CreationTime);
+    Set-Attr $result.stat "lastaccesstime" (Date_To_Timestamp $epoch_date $info.LastAccessTime);
+    Set-Attr $result.stat "lastwritetime" (Date_To_Timestamp $epoch_date $info.LastWriteTime);
 }
 Else
 {
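The new fields come back under the registered result's stat key; a sketch of consuming them in a play (path is illustrative):

- name: inspect a file on a Windows host
  win_stat: path=C:\inetpub\wwwroot\default.htm
  register: st

- debug: msg="owner={{ st.stat.owner }} lastwritetime={{ st.stat.lastwritetime }}"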
