Core/extras split WIP.

reviewable/pr18780/r1
Michael DeHaan 10 years ago
parent 21f7c5e01a
commit bce9bcc0fe

@@ -1,313 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Jan-Piet Mens <jpmens () gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# see examples/playbooks/get_url.yml
import shutil
import datetime
import re
import tempfile
DOCUMENTATION = '''
---
module: get_url
short_description: Downloads files from HTTP, HTTPS, or FTP to node
description:
- Downloads files from HTTP, HTTPS, or FTP to the remote server. The remote
server I(must) have direct access to the remote resource.
- By default, if an environment variable C(<protocol>_proxy) is set on
the target host, requests will be sent through that proxy. This
behaviour can be overridden by setting a variable for this task
(see `setting the environment
<http://docs.ansible.com/playbooks_environment.html>`_),
or by using the use_proxy option.
version_added: "0.6"
options:
url:
description:
- HTTP, HTTPS, or FTP URL in the form (http|https|ftp)://[user[:pass]]@host.domain[:port]/path
required: true
default: null
aliases: []
dest:
description:
- Absolute path where the file should be downloaded to.
- If C(dest) is a directory, either the server-provided filename or, if
none is provided, the base name of the URL on the remote server will be
used. When C(dest) is a directory, C(force) has no effect: the file will
always be downloaded, but it is only replaced if the contents changed.
required: true
default: null
force:
description:
- If C(yes) and C(dest) is not a directory, will download the file every
time and replace the file if the contents change. If C(no), the file
will only be downloaded if the destination does not exist. Generally
should be C(yes) only for small local files. Prior to 0.6, this module
behaved as if C(yes) was the default.
version_added: "0.7"
required: false
choices: [ "yes", "no" ]
default: "no"
aliases: [ "thirsty" ]
sha256sum:
description:
- If a SHA-256 checksum is passed to this parameter, the digest of the
destination file will be calculated after it is downloaded to ensure
its integrity and verify that the transfer completed successfully.
version_added: "1.3"
required: false
default: null
use_proxy:
description:
- if C(no), it will not use a proxy, even if one is defined in
an environment variable on the target hosts.
required: false
default: 'yes'
choices: ['yes', 'no']
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
timeout:
description:
- Timeout for URL request
required: false
default: 10
version_added: '1.8'
url_username:
description:
- The username for use in HTTP basic authentication. This parameter can be used
without C(url_password) for sites that allow empty passwords.
required: false
version_added: '1.6'
url_password:
description:
- The password for use in HTTP basic authentication. If the C(url_username)
parameter is not specified, the C(url_password) parameter will not be used.
required: false
version_added: '1.6'
others:
description:
- all arguments accepted by the M(file) module also work here
required: false
notes:
- This module doesn't yet support configuration for proxies.
# informational: requirements for nodes
requirements: [ urllib2, urlparse ]
author: Jan-Piet Mens
'''
EXAMPLES='''
- name: download foo.conf
get_url: url=http://example.com/path/file.conf dest=/etc/foo.conf mode=0440
- name: download file with sha256 check
get_url: url=http://example.com/path/file.conf dest=/etc/foo.conf sha256sum=b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c
'''
try:
import hashlib
HAS_HASHLIB=True
except ImportError:
HAS_HASHLIB=False
# ==============================================================
# url handling
def url_filename(url):
fn = os.path.basename(urlparse.urlsplit(url)[2])
if fn == '':
return 'index.html'
return fn
def url_get(module, url, dest, use_proxy, last_mod_time, force, timeout=10):
"""
Download data from the url and store in a temporary file.
Return (tempfile, info about the request)
"""
rsp, info = fetch_url(module, url, use_proxy=use_proxy, force=force, last_mod_time=last_mod_time, timeout=timeout)
if info['status'] == 304:
module.exit_json(url=url, dest=dest, changed=False, msg=info.get('msg', ''))
# create a temporary file and copy content to do md5-based replacement
if info['status'] != 200:
module.fail_json(msg="Request failed", status_code=info['status'], response=info['msg'], url=url, dest=dest)
fd, tempname = tempfile.mkstemp()
f = os.fdopen(fd, 'wb')
try:
shutil.copyfileobj(rsp, f)
except Exception, err:
os.remove(tempname)
module.fail_json(msg="failed to create temporary content file: %s" % str(err))
f.close()
rsp.close()
return tempname, info
def extract_filename_from_headers(headers):
"""
Extracts a filename from the given dict of HTTP headers.
Looks for the content-disposition header and applies a regex.
Returns the filename if successful, else None."""
cont_disp_regex = 'attachment; ?filename="?([^"]+)'
res = None
if 'content-disposition' in headers:
cont_disp = headers['content-disposition']
match = re.match(cont_disp_regex, cont_disp)
if match:
res = match.group(1)
# Try preventing any funny business.
res = os.path.basename(res)
return res
# ==============================================================
# main
def main():
argument_spec = url_argument_spec()
argument_spec.update(
url = dict(required=True),
dest = dict(required=True),
sha256sum = dict(default=''),
timeout = dict(required=False, type='int', default=10),
)
module = AnsibleModule(
# not checking because of daisy chain to file module
argument_spec = argument_spec,
add_file_common_args=True
)
url = module.params['url']
dest = os.path.expanduser(module.params['dest'])
force = module.params['force']
sha256sum = module.params['sha256sum']
use_proxy = module.params['use_proxy']
timeout = module.params['timeout']
dest_is_dir = os.path.isdir(dest)
last_mod_time = None
if not dest_is_dir and os.path.exists(dest):
if not force:
module.exit_json(msg="file already exists", dest=dest, url=url, changed=False)
# If the file already exists, prepare the last modified time for the
# request.
mtime = os.path.getmtime(dest)
last_mod_time = datetime.datetime.utcfromtimestamp(mtime)
# download to tmpsrc
tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force, timeout)
# Now the request has completed, we can finally generate the final
# destination file name from the info dict.
if dest_is_dir:
filename = extract_filename_from_headers(info)
if not filename:
# Fall back to extracting the filename from the URL.
# Pluck the URL from the info, since a redirect could have changed
# it.
filename = url_filename(info['url'])
dest = os.path.join(dest, filename)
md5sum_src = None
md5sum_dest = None
# raise an error if there is no tmpsrc file
if not os.path.exists(tmpsrc):
os.remove(tmpsrc)
module.fail_json(msg="Request failed", status_code=info['status'], response=info['msg'])
if not os.access(tmpsrc, os.R_OK):
os.remove(tmpsrc)
module.fail_json( msg="Source %s not readable" % (tmpsrc))
md5sum_src = module.md5(tmpsrc)
# check if there is no dest file
if os.path.exists(dest):
# raise an error if copy has no permission on dest
if not os.access(dest, os.W_OK):
os.remove(tmpsrc)
module.fail_json( msg="Destination %s not writable" % (dest))
if not os.access(dest, os.R_OK):
os.remove(tmpsrc)
module.fail_json( msg="Destination %s not readable" % (dest))
md5sum_dest = module.md5(dest)
else:
if not os.access(os.path.dirname(dest), os.W_OK):
os.remove(tmpsrc)
module.fail_json( msg="Destination %s not writable" % (os.path.dirname(dest)))
if md5sum_src != md5sum_dest:
try:
shutil.copyfile(tmpsrc, dest)
except Exception, err:
os.remove(tmpsrc)
module.fail_json(msg="failed to copy %s to %s: %s" % (tmpsrc, dest, str(err)))
changed = True
else:
changed = False
# Check the digest of the destination file and ensure that it matches the
# sha256sum parameter if it is present
if sha256sum != '':
# Remove any non-alphanumeric characters, including the infamous
# Unicode zero-width space
stripped_sha256sum = re.sub(r'\W+', '', sha256sum)
if not HAS_HASHLIB:
os.remove(dest)
module.fail_json(msg="The sha256sum parameter requires hashlib, which is available in Python 2.5 and higher")
else:
destination_checksum = module.sha256(dest)
if stripped_sha256sum.lower() != destination_checksum:
os.remove(dest)
module.fail_json(msg="The SHA-256 checksum for %s did not match %s; it was %s." % (dest, sha256sum, destination_checksum))
os.remove(tmpsrc)
# allow file attribute changes
module.params['path'] = dest
file_args = module.load_file_common_arguments(module.params)
file_args['path'] = dest
changed = module.set_fs_attributes_if_different(file_args, changed)
# Mission complete
module.exit_json(url=url, dest=dest, src=tmpsrc, md5sum=md5sum_src,
sha256sum=sha256sum, changed=changed, msg=info.get('msg', ''))
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
main()
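
For reference, the sha256sum verification above boils down to the following standalone sketch; sha256_matches is an illustrative helper name, not part of the module.

import hashlib
import re

def sha256_matches(path, expected_sha256sum):
    # Strip non-alphanumeric characters (e.g. a pasted zero-width space),
    # then compare against the lowercase hex digest of the file contents.
    expected = re.sub(r'\W+', '', expected_sha256sum).lower()
    with open(path, 'rb') as f:
        digest = hashlib.sha256(f.read()).hexdigest()
    return digest == expected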

@@ -1,75 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: slurp
version_added: historical
short_description: Slurps a file from remote nodes
description:
- This module works like M(fetch). It is used for fetching a base64-
encoded blob containing the data in a remote file.
options:
src:
description:
- The file on the remote system to fetch. This I(must) be a file, not a
directory.
required: true
default: null
aliases: []
notes:
- "See also: M(fetch)"
requirements: []
author: Michael DeHaan
'''
EXAMPLES = '''
ansible host -m slurp -a 'src=/tmp/xx'
host | success >> {
"content": "aGVsbG8gQW5zaWJsZSB3b3JsZAo=",
"encoding": "base64"
}
'''
import base64
def main():
module = AnsibleModule(
argument_spec = dict(
src = dict(required=True, aliases=['path']),
),
supports_check_mode=True
)
source = os.path.expanduser(module.params['src'])
if not os.path.exists(source):
module.fail_json(msg="file not found: %s" % source)
if not os.access(source, os.R_OK):
module.fail_json(msg="file is not readable: %s" % source)
data = base64.b64encode(file(source).read())
module.exit_json(content=data, source=source, encoding='base64')
# import module snippets
from ansible.module_utils.basic import *
main()
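
For reference, consuming the module's result on the control side is a plain base64 decode; decode_slurp_result is an illustrative helper, not part of the module.

import base64

def decode_slurp_result(result):
    # result is the structure shown in the EXAMPLES above, e.g.
    # {"content": "aGVsbG8gQW5zaWJsZSB3b3JsZAo=", "encoding": "base64"}
    if result.get('encoding') != 'base64':
        raise ValueError("unexpected encoding: %r" % result.get('encoding'))
    return base64.b64decode(result['content'])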

@@ -1,445 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Romeo Theriault <romeot () hawaii.edu>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# see examples/playbooks/uri.yml
import shutil
import tempfile
import base64
import datetime
try:
import json
except ImportError:
import simplejson as json
DOCUMENTATION = '''
---
module: uri
short_description: Interacts with webservices
description:
- Interacts with HTTP and HTTPS web services and supports Digest, Basic and WSSE
HTTP authentication mechanisms.
version_added: "1.1"
options:
url:
description:
- HTTP or HTTPS URL in the form (http|https)://host.domain[:port]/path
required: true
default: null
aliases: []
dest:
description:
- path to which to download the file (if desired). If I(dest) is a directory, the basename of the file on the remote server will be used.
required: false
default: null
user:
description:
- username for the module to use for Digest, Basic or WSSE authentication.
required: false
default: null
password:
description:
- password for the module to use for Digest, Basic or WSSE authentication.
required: false
default: null
body:
description:
- The body of the http request/response to the web service.
required: false
default: null
method:
description:
- The HTTP method of the request or response.
required: false
choices: [ "GET", "POST", "PUT", "HEAD", "DELETE", "OPTIONS", "PATCH" ]
default: "GET"
return_content:
description:
- Whether or not to return the body of the request as a "content" key in the dictionary result. If the reported Content-type is "application/json", then the JSON is additionally loaded into a key called C(json) in the dictionary results.
required: false
choices: [ "yes", "no" ]
default: "no"
force_basic_auth:
description:
- httplib2, the library used by the uri module, only sends authentication information when a webservice
responds to an initial request with a 401 status. Since some basic auth services do not properly
send a 401, logins will fail. This option forces the sending of the Basic authentication header
upon initial request.
required: false
choices: [ "yes", "no" ]
default: "no"
follow_redirects:
description:
- Whether or not the URI module should follow redirects. C(all) will follow all redirects.
C(safe) will follow only "safe" redirects, where "safe" means that the client is only
doing a GET or HEAD on the URI to which it is being redirected. C(none) will not follow
any redirects. Note that C(yes) and C(no) choices are accepted for backwards compatibility,
where C(yes) is the equivalent of C(all) and C(no) is the equivalent of C(safe). C(yes) and C(no)
are deprecated and will be removed in some future version of Ansible.
required: false
choices: [ "all", "safe", "none" ]
default: "safe"
creates:
description:
- a filename; when it already exists, this step will not be run.
required: false
removes:
description:
- a filename; when it does not exist, this step will not be run.
required: false
status_code:
description:
- A valid, numeric, HTTP status code that signifies success of the request. Can also be a comma-separated list of status codes.
required: false
default: 200
timeout:
description:
- The socket level timeout in seconds
required: false
default: 30
HEADER_:
description:
- Any parameter starting with "HEADER_" is sent with your request as a header.
For example, HEADER_Content-Type="application/json" would send the header
"Content-Type" along with your request with a value of "application/json".
required: false
default: null
others:
description:
- all arguments accepted by the M(file) module also work here
required: false
# informational: requirements for nodes
requirements: [ urlparse, httplib2 ]
author: Romeo Theriault
'''
EXAMPLES = '''
# Check that you can connect (GET) to a page and it returns a status 200
- uri: url=http://www.example.com
# Check that a page returns a status 200 and fail if the word AWESOME is not in the page contents.
- action: uri url=http://www.example.com return_content=yes
register: webpage
- action: fail
when: "'AWESOME' not in \"{{ webpage.content }}\""
# Create a JIRA issue
- uri: url=https://your.jira.example.com/rest/api/2/issue/
method=POST user=your_username password=your_pass
body="{{ lookup('file','issue.json') }}" force_basic_auth=yes
status_code=201 HEADER_Content-Type="application/json"
# Login to a form based webpage, then use the returned cookie to
# access the app in later tasks
- uri: url=https://your.form.based.auth.example.com/index.php
method=POST body="name=your_username&password=your_password&enter=Sign%20in"
status_code=302 HEADER_Content-Type="application/x-www-form-urlencoded"
register: login
- uri: url=https://your.form.based.auth.example.com/dashboard.php
method=GET return_content=yes HEADER_Cookie="{{login.set_cookie}}"
# Queue build of a project in Jenkins:
- uri: url=http://{{jenkins.host}}/job/{{jenkins.job}}/build?token={{jenkins.token}}
method=GET user={{jenkins.user}} password={{jenkins.password}} force_basic_auth=yes status_code=201
'''
HAS_HTTPLIB2 = True
try:
import httplib2
except ImportError:
HAS_HTTPLIB2 = False
HAS_URLPARSE = True
try:
import urlparse
import socket
except ImportError:
HAS_URLPARSE = False
def write_file(module, url, dest, content):
# create a tempfile with some test content
fd, tmpsrc = tempfile.mkstemp()
f = open(tmpsrc, 'wb')
try:
f.write(content)
except Exception, err:
os.remove(tmpsrc)
module.fail_json(msg="failed to create temporary content file: %s" % str(err))
f.close()
md5sum_src = None
md5sum_dest = None
# raise an error if there is no tmpsrc file
if not os.path.exists(tmpsrc):
os.remove(tmpsrc)
module.fail_json(msg="Source %s does not exist" % (tmpsrc))
if not os.access(tmpsrc, os.R_OK):
os.remove(tmpsrc)
module.fail_json( msg="Source %s not readable" % (tmpsrc))
md5sum_src = module.md5(tmpsrc)
# check if there is no dest file
if os.path.exists(dest):
# raise an error if copy has no permission on dest
if not os.access(dest, os.W_OK):
os.remove(tmpsrc)
module.fail_json( msg="Destination %s not writable" % (dest))
if not os.access(dest, os.R_OK):
os.remove(tmpsrc)
module.fail_json( msg="Destination %s not readable" % (dest))
md5sum_dest = module.md5(dest)
else:
if not os.access(os.path.dirname(dest), os.W_OK):
os.remove(tmpsrc)
module.fail_json( msg="Destination dir %s not writable" % (os.path.dirname(dest)))
if md5sum_src != md5sum_dest:
try:
shutil.copyfile(tmpsrc, dest)
except Exception, err:
os.remove(tmpsrc)
module.fail_json(msg="failed to copy %s to %s: %s" % (tmpsrc, dest, str(err)))
os.remove(tmpsrc)
def url_filename(url):
fn = os.path.basename(urlparse.urlsplit(url)[2])
if fn == '':
return 'index.html'
return fn
def uri(module, url, dest, user, password, body, method, headers, redirects, socket_timeout):
# To debug
#httplib2.debug = 4
# Handle Redirects
if redirects == "all" or redirects == "yes":
follow_redirects = True
follow_all_redirects = True
elif redirects == "none":
follow_redirects = False
follow_all_redirects = False
else:
follow_redirects = True
follow_all_redirects = False
# Create a Http object and set some default options.
h = httplib2.Http(disable_ssl_certificate_validation=True, timeout=socket_timeout)
h.follow_all_redirects = follow_all_redirects
h.follow_redirects = follow_redirects
h.forward_authorization_headers = True
# If they have a username or password verify they have both, then add them to the request
if user is not None and password is None:
module.fail_json(msg="Both a username and password need to be set.")
if password is not None and user is None:
module.fail_json(msg="Both a username and password need to be set.")
if user is not None and password is not None:
h.add_credentials(user, password)
# is dest is set and is a directory, let's check if we get redirected and
# set the filename from that url
redirected = False
resp_redir = {}
r = {}
if dest is not None:
dest = os.path.expanduser(dest)
if os.path.isdir(dest):
# first check if we are redirected to a file download
h.follow_redirects=False
# Try the request
try:
resp_redir, content_redir = h.request(url, method=method, body=body, headers=headers)
# if we are redirected, update the url with the location header,
# and update dest with the new url filename
except:
pass
if 'status' in resp_redir and resp_redir['status'] in ["301", "302", "303", "307"]:
url = resp_redir['location']
redirected = True
dest = os.path.join(dest, url_filename(url))
# if destination file already exist, only download if file newer
if os.path.exists(dest):
t = datetime.datetime.utcfromtimestamp(os.path.getmtime(dest))
tstamp = t.strftime('%a, %d %b %Y %H:%M:%S +0000')
headers['If-Modified-Since'] = tstamp
# do safe redirects now, including 307
h.follow_redirects=follow_redirects
# Make the request, or try to :)
try:
resp, content = h.request(url, method=method, body=body, headers=headers)
r['redirected'] = redirected
r.update(resp_redir)
r.update(resp)
try:
return r, unicode(content.decode('unicode_escape')), dest
except:
return r, content, dest
except httplib2.RedirectMissingLocation:
module.fail_json(msg="A 3xx redirect response code was provided but no Location: header was provided to point to the new location.")
except httplib2.RedirectLimit:
module.fail_json(msg="The maximum number of redirections was reached without coming to a final URI.")
except httplib2.ServerNotFoundError:
module.fail_json(msg="Unable to resolve the host name given.")
except httplib2.RelativeURIError:
module.fail_json(msg="A relative, as opposed to an absolute URI, was passed in.")
except httplib2.FailedToDecompressContent:
module.fail_json(msg="The headers claimed that the content of the response was compressed but the decompression algorithm applied to the content failed.")
except httplib2.UnimplementedDigestAuthOptionError:
module.fail_json(msg="The server requested a type of Digest authentication that we are unfamiliar with.")
except httplib2.UnimplementedHmacDigestAuthOptionError:
module.fail_json(msg="The server requested a type of HMACDigest authentication that we are unfamiliar with.")
except socket.error, e:
module.fail_json(msg="Socket error: %s to %s" % (e, url))
def main():
module = AnsibleModule(
argument_spec = dict(
url = dict(required=True),
dest = dict(required=False, default=None),
user = dict(required=False, default=None),
password = dict(required=False, default=None),
body = dict(required=False, default=None),
method = dict(required=False, default='GET', choices=['GET', 'POST', 'PUT', 'HEAD', 'DELETE', 'OPTIONS', 'PATCH']),
return_content = dict(required=False, default='no', type='bool'),
force_basic_auth = dict(required=False, default='no', type='bool'),
follow_redirects = dict(required=False, default='safe', choices=['all', 'safe', 'none', 'yes', 'no']),
creates = dict(required=False, default=None),
removes = dict(required=False, default=None),
status_code = dict(required=False, default=[200], type='list'),
timeout = dict(required=False, default=30, type='int'),
),
check_invalid_arguments=False,
add_file_common_args=True
)
if not HAS_HTTPLIB2:
module.fail_json(msg="httplib2 is not installed")
if not HAS_URLPARSE:
module.fail_json(msg="urlparse is not installed")
url = module.params['url']
user = module.params['user']
password = module.params['password']
body = module.params['body']
method = module.params['method']
dest = module.params['dest']
return_content = module.params['return_content']
force_basic_auth = module.params['force_basic_auth']
redirects = module.params['follow_redirects']
creates = module.params['creates']
removes = module.params['removes']
status_code = [int(x) for x in list(module.params['status_code'])]
socket_timeout = module.params['timeout']
# Grab all the http headers. Need this hack since passing multi-values is currently a bit ugly. (e.g. headers='{"Content-Type":"application/json"}')
dict_headers = {}
for key, value in module.params.iteritems():
if key.startswith("HEADER_"):
skey = key.replace("HEADER_", "")
dict_headers[skey] = value
if creates is not None:
# do not run the command if the line contains creates=filename
# and the filename already exists. This allows idempotence
# of uri executions.
creates = os.path.expanduser(creates)
if os.path.exists(creates):
module.exit_json(stdout="skipped, since %s exists" % creates, skipped=True, changed=False, stderr=False, rc=0)
if removes is not None:
# do not run the command if the line contains removes=filename
# and the filename does not exist. This allows idempotence
# of uri executions.
removes = os.path.expanduser(removes)
if not os.path.exists(removes):
module.exit_json(stdout="skipped, since %s does not exist" % removes, skipped=True, changed=False, stderr=False, rc=0)
# httplib2 only sends authentication after the server asks for it with a 401.
# Some 'basic auth' services fail to send a 401 and require the authentication
# up front. This creates the Basic authentication header and sends it immediately.
if force_basic_auth:
dict_headers["Authorization"] = "Basic {0}".format(base64.b64encode("{0}:{1}".format(user, password)))
# Make the request
resp, content, dest = uri(module, url, dest, user, password, body, method, dict_headers, redirects, socket_timeout)
resp['status'] = int(resp['status'])
# Write the file out if requested
if dest is not None:
if resp['status'] == 304:
changed = False
else:
write_file(module, url, dest, content)
# allow file attribute changes
changed = True
module.params['path'] = dest
file_args = module.load_file_common_arguments(module.params)
file_args['path'] = dest
changed = module.set_fs_attributes_if_different(file_args, changed)
resp['path'] = dest
else:
changed = False
# Transmogrify the headers, replacing '-' with '_', since variables don't work with dashes.
uresp = {}
for key, value in resp.iteritems():
ukey = key.replace("-", "_")
uresp[ukey] = value
if 'content_type' in uresp:
if uresp['content_type'].startswith('application/json'):
try:
js = json.loads(content)
uresp['json'] = js
except:
pass
if resp['status'] not in status_code:
module.fail_json(msg="Status code was not " + str(status_code), content=content, **uresp)
elif return_content:
module.exit_json(changed=changed, content=content, **uresp)
else:
module.exit_json(changed=changed, **uresp)
# import module snippets
from ansible.module_utils.basic import *
main()
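
For reference, the HEADER_ convention and the dash-to-underscore response key handling described above can be restated as the following standalone sketch; both function names are illustrative, not part of the module.

def collect_request_headers(params):
    # Any task argument named HEADER_<Name> becomes a request header <Name>,
    # e.g. HEADER_Content-Type="application/json".
    headers = {}
    for key, value in params.items():
        if key.startswith('HEADER_'):
            headers[key[len('HEADER_'):]] = value
    return headers

def normalize_response_keys(resp):
    # Response header names are exposed with '-' replaced by '_' so they can
    # be referenced as variables, e.g. content-type -> content_type.
    return dict((key.replace('-', '_'), value) for key, value in resp.items())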

@@ -1,562 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Flowroute LLC
# Written by Matthew Williams <matthew@flowroute.com>
# Based on yum module written by Seth Vidal <skvidal at fedoraproject.org>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: apt
short_description: Manages apt packages
description:
- Manages I(apt) packages (such as for Debian/Ubuntu).
version_added: "0.0.2"
options:
name:
description:
- A package name, like C(foo), or package specifier with version, like C(foo=1.0). Wildcards (fnmatch) like apt* are also supported.
required: false
default: null
state:
description:
- Indicates the desired package state. C(latest) ensures that the latest version is installed.
required: false
default: present
choices: [ "latest", "absent", "present" ]
update_cache:
description:
- Run the equivalent of C(apt-get update) before the operation. Can be run as part of the package installation or as a separate step.
required: false
default: no
choices: [ "yes", "no" ]
cache_valid_time:
description:
- If C(update_cache) is specified and the last run is less than or equal to I(cache_valid_time) seconds ago, the C(update_cache) step gets skipped.
required: false
default: no
purge:
description:
- Will force purging of configuration files if the module state is set to I(absent).
required: false
default: no
choices: [ "yes", "no" ]
default_release:
description:
- Corresponds to the C(-t) option for I(apt) and sets pin priorities
required: false
default: null
install_recommends:
description:
- Corresponds to the C(--no-install-recommends) option for I(apt). Default behavior (C(yes)) replicates apt's default behavior; C(no) does not install recommended packages. Suggested packages are never installed.
required: false
default: yes
choices: [ "yes", "no" ]
force:
description:
- If C(yes), force installs/removes.
required: false
default: "no"
choices: [ "yes", "no" ]
upgrade:
description:
- 'If yes or safe, performs an aptitude safe-upgrade.'
- 'If full, performs an aptitude full-upgrade.'
- 'If dist, performs an apt-get dist-upgrade.'
- 'Note: This does not upgrade a specific package, use state=latest for that.'
version_added: "1.1"
required: false
default: "yes"
choices: [ "yes", "safe", "full", "dist"]
dpkg_options:
description:
- Add dpkg options to apt command. Defaults to '-o "Dpkg::Options::=--force-confdef" -o "Dpkg::Options::=--force-confold"'
- Options should be supplied as comma separated list
required: false
default: 'force-confdef,force-confold'
deb:
description:
- Path to a .deb package on the remote machine.
required: false
version_added: "1.6"
requirements: [ python-apt, aptitude ]
author: Matthew Williams
notes:
- Three of the upgrade modes (C(full), C(safe) and its alias C(yes)) require C(aptitude), otherwise
C(apt-get) suffices.
'''
EXAMPLES = '''
# Update repositories cache and install "foo" package
- apt: name=foo update_cache=yes
# Remove "foo" package
- apt: name=foo state=absent
# Install the package "foo"
- apt: name=foo state=present
# Install the version '1.00' of package "foo"
- apt: name=foo=1.00 state=present
# Update the repository cache and update package "nginx" to latest version using default release squeeze-backports
- apt: name=nginx state=latest default_release=squeeze-backports update_cache=yes
# Install latest version of "openjdk-6-jdk" ignoring "install-recommends"
- apt: name=openjdk-6-jdk state=latest install_recommends=no
# Update all packages to the latest version
- apt: upgrade=dist
# Run the equivalent of "apt-get update" as a separate step
- apt: update_cache=yes
# Only run "update_cache=yes" if the last one is more than 3600 seconds ago
- apt: update_cache=yes cache_valid_time=3600
# Pass options to dpkg on run
- apt: upgrade=dist update_cache=yes dpkg_options='force-confold,force-confdef'
# Install a .deb package
- apt: deb=/tmp/mypackage.deb
'''
import traceback
# added to stave off future warnings about apt api
import warnings
warnings.filterwarnings('ignore', "apt API not stable yet", FutureWarning)
import os
import datetime
import fnmatch
# APT related constants
APT_ENV_VARS = dict(
DEBIAN_FRONTEND = 'noninteractive',
DEBIAN_PRIORITY = 'critical',
LANG = 'C'
)
DPKG_OPTIONS = 'force-confdef,force-confold'
APT_GET_ZERO = "0 upgraded, 0 newly installed"
APTITUDE_ZERO = "0 packages upgraded, 0 newly installed"
APT_LISTS_PATH = "/var/lib/apt/lists"
APT_UPDATE_SUCCESS_STAMP_PATH = "/var/lib/apt/periodic/update-success-stamp"
HAS_PYTHON_APT = True
try:
import apt
import apt.debfile
import apt_pkg
except ImportError:
HAS_PYTHON_APT = False
def package_split(pkgspec):
parts = pkgspec.split('=')
if len(parts) > 1:
return parts[0], parts[1]
else:
return parts[0], None
def package_status(m, pkgname, version, cache, state):
try:
# get the package from the cache, as well as the
# low-level apt_pkg.Package object which contains
# state fields not directly accessible from the
# higher-level apt.package.Package object.
pkg = cache[pkgname]
ll_pkg = cache._cache[pkgname] # the low-level package object
except KeyError:
if state == 'install':
if cache.get_providing_packages(pkgname):
return False, True, False
m.fail_json(msg="No package matching '%s' is available" % pkgname)
else:
return False, False, False
try:
has_files = len(pkg.installed_files) > 0
except UnicodeDecodeError:
has_files = True
except AttributeError:
has_files = False # older python-apt cannot be used to determine non-purged
try:
package_is_installed = ll_pkg.current_state == apt_pkg.CURSTATE_INSTALLED
except AttributeError: # python-apt 0.7.X has very weak low-level object
try:
# might not be necessary as python-apt post-0.7.X should have current_state property
package_is_installed = pkg.is_installed
except AttributeError:
# assume older version of python-apt is installed
package_is_installed = pkg.isInstalled
if version and package_is_installed:
try:
installed_version = pkg.installed.version
except AttributeError:
installed_version = pkg.installedVersion
return package_is_installed and fnmatch.fnmatch(installed_version, version), False, has_files
else:
try:
package_is_upgradable = pkg.is_upgradable
except AttributeError:
# assume older version of python-apt is installed
package_is_upgradable = pkg.isUpgradable
return package_is_installed, package_is_upgradable, has_files
def expand_dpkg_options(dpkg_options_compressed):
options_list = dpkg_options_compressed.split(',')
dpkg_options = ""
for dpkg_option in options_list:
dpkg_options = '%s -o "Dpkg::Options::=--%s"' \
% (dpkg_options, dpkg_option)
return dpkg_options.strip()
def expand_pkgspec_from_fnmatches(m, pkgspec, cache):
new_pkgspec = []
for pkgname_or_fnmatch_pattern in pkgspec:
# note that any of these chars is not allowed in a (debian) pkgname
if [c for c in pkgname_or_fnmatch_pattern if c in "*?[]!"]:
if "=" in pkgname_or_fnmatch_pattern:
m.fail_json(msg="pkgname wildcard and version can not be mixed")
# handle multiarch pkgnames, the idea is that "apt*" should
# only select native packages. But "apt*:i386" should still work
if not ":" in pkgname_or_fnmatch_pattern:
matches = fnmatch.filter(
[pkg.name for pkg in cache
if not ":" in pkg.name], pkgname_or_fnmatch_pattern)
else:
matches = fnmatch.filter(
[pkg.name for pkg in cache], pkgname_or_fnmatch_pattern)
if len(matches) == 0:
m.fail_json(msg="No package(s) matching '%s' available" % str(pkgname_or_fnmatch_pattern))
else:
new_pkgspec.extend(matches)
else:
new_pkgspec.append(pkgname_or_fnmatch_pattern)
return new_pkgspec
def install(m, pkgspec, cache, upgrade=False, default_release=None,
install_recommends=True, force=False,
dpkg_options=expand_dpkg_options(DPKG_OPTIONS)):
packages = ""
pkgspec = expand_pkgspec_from_fnmatches(m, pkgspec, cache)
for package in pkgspec:
name, version = package_split(package)
installed, upgradable, has_files = package_status(m, name, version, cache, state='install')
if not installed or (upgrade and upgradable):
packages += "'%s' " % package
if len(packages) != 0:
if force:
force_yes = '--force-yes'
else:
force_yes = ''
if m.check_mode:
check_arg = '--simulate'
else:
check_arg = ''
for (k,v) in APT_ENV_VARS.iteritems():
os.environ[k] = v
cmd = "%s -y %s %s %s install %s" % (APT_GET_CMD, dpkg_options, force_yes, check_arg, packages)
if default_release:
cmd += " -t '%s'" % (default_release,)
if not install_recommends:
cmd += " --no-install-recommends"
rc, out, err = m.run_command(cmd)
if rc:
return (False, dict(msg="'apt-get install %s' failed: %s" % (packages, err), stdout=out, stderr=err))
else:
return (True, dict(changed=True, stdout=out, stderr=err))
else:
return (True, dict(changed=False))
def install_deb(m, debs, cache, force, install_recommends, dpkg_options):
changed=False
deps_to_install = []
pkgs_to_install = []
for deb_file in debs.split(','):
pkg = apt.debfile.DebPackage(deb_file)
# Check if it's already installed
if pkg.compare_to_version_in_cache() == pkg.VERSION_SAME:
continue
# Check if package is installable
if not pkg.check():
m.fail_json(msg=pkg._failure_string)
# add any missing deps to the list of deps we need
# to install so they're all done in one shot
deps_to_install.extend(pkg.missing_deps)
# and add this deb to the list of packages to install
pkgs_to_install.append(deb_file)
# install the deps through apt
retvals = {}
if len(deps_to_install) > 0:
(success, retvals) = install(m=m, pkgspec=deps_to_install, cache=cache,
install_recommends=install_recommends,
dpkg_options=expand_dpkg_options(dpkg_options))
if not success:
m.fail_json(**retvals)
changed = retvals.get('changed', False)
if len(pkgs_to_install) > 0:
options = ' '.join(["--%s"% x for x in dpkg_options.split(",")])
if m.check_mode:
options += " --simulate"
if force:
options += " --force-yes"
cmd = "dpkg %s -i %s" % (options, " ".join(pkgs_to_install))
rc, out, err = m.run_command(cmd)
if "stdout" in retvals:
stdout = retvals["stdout"] + out
else:
stdout = out
if "stderr" in retvals:
stderr = retvals["stderr"] + err
else:
stderr = err
if rc == 0:
m.exit_json(changed=True, stdout=stdout, stderr=stderr)
else:
m.fail_json(msg="%s failed" % cmd, stdout=stdout, stderr=stderr)
else:
m.exit_json(changed=changed, stdout=retvals.get('stdout',''), stderr=retvals.get('stderr',''))
def remove(m, pkgspec, cache, purge=False,
dpkg_options=expand_dpkg_options(DPKG_OPTIONS)):
packages = ""
pkgspec = expand_pkgspec_from_fnmatches(m, pkgspec, cache)
for package in pkgspec:
name, version = package_split(package)
installed, upgradable, has_files = package_status(m, name, version, cache, state='remove')
if installed or (has_files and purge):
packages += "'%s' " % package
if len(packages) == 0:
m.exit_json(changed=False)
else:
if purge:
purge = '--purge'
else:
purge = ''
for (k,v) in APT_ENV_VARS.iteritems():
os.environ[k] = v
cmd = "%s -q -y %s %s remove %s" % (APT_GET_CMD, dpkg_options, purge, packages)
if m.check_mode:
m.exit_json(changed=True)
rc, out, err = m.run_command(cmd)
if rc:
m.fail_json(msg="'apt-get remove %s' failed: %s" % (packages, err), stdout=out, stderr=err)
m.exit_json(changed=True, stdout=out, stderr=err)
def upgrade(m, mode="yes", force=False, default_release=None,
dpkg_options=expand_dpkg_options(DPKG_OPTIONS)):
if m.check_mode:
check_arg = '--simulate'
else:
check_arg = ''
apt_cmd = None
if mode == "dist":
# apt-get dist-upgrade
apt_cmd = APT_GET_CMD
upgrade_command = "dist-upgrade"
elif mode == "full":
# aptitude full-upgrade
apt_cmd = APTITUDE_CMD
upgrade_command = "full-upgrade"
else:
# aptitude safe-upgrade # mode=yes # default
apt_cmd = APTITUDE_CMD
upgrade_command = "safe-upgrade"
if force:
if apt_cmd == APT_GET_CMD:
force_yes = '--force-yes'
else:
force_yes = ''
else:
force_yes = ''
apt_cmd_path = m.get_bin_path(apt_cmd, required=True)
for (k,v) in APT_ENV_VARS.iteritems():
os.environ[k] = v
cmd = '%s -y %s %s %s %s' % (apt_cmd_path, dpkg_options,
force_yes, check_arg, upgrade_command)
if default_release:
cmd += " -t '%s'" % (default_release,)
rc, out, err = m.run_command(cmd)
if rc:
m.fail_json(msg="'%s %s' failed: %s" % (apt_cmd, upgrade_command, err), stdout=out)
if (apt_cmd == APT_GET_CMD and APT_GET_ZERO in out) or (apt_cmd == APTITUDE_CMD and APTITUDE_ZERO in out):
m.exit_json(changed=False, msg=out, stdout=out, stderr=err)
m.exit_json(changed=True, msg=out, stdout=out, stderr=err)
def main():
module = AnsibleModule(
argument_spec = dict(
state = dict(default='installed', choices=['installed', 'latest', 'removed', 'absent', 'present']),
update_cache = dict(default=False, aliases=['update-cache'], type='bool'),
cache_valid_time = dict(type='int'),
purge = dict(default=False, type='bool'),
package = dict(default=None, aliases=['pkg', 'name'], type='list'),
deb = dict(default=None),
default_release = dict(default=None, aliases=['default-release']),
install_recommends = dict(default='yes', aliases=['install-recommends'], type='bool'),
force = dict(default='no', type='bool'),
upgrade = dict(choices=['yes', 'safe', 'full', 'dist']),
dpkg_options = dict(default=DPKG_OPTIONS)
),
mutually_exclusive = [['package', 'upgrade', 'deb']],
required_one_of = [['package', 'upgrade', 'update_cache', 'deb']],
supports_check_mode = True
)
if not HAS_PYTHON_APT:
try:
module.run_command('apt-get update && apt-get install python-apt -y -q', use_unsafe_shell=True, check_rc=True)
global apt, apt_pkg
import apt
import apt_pkg
except ImportError:
module.fail_json(msg="Could not import python modules: apt, apt_pkg. Please install python-apt package.")
global APTITUDE_CMD
APTITUDE_CMD = module.get_bin_path("aptitude", False)
global APT_GET_CMD
APT_GET_CMD = module.get_bin_path("apt-get")
p = module.params
if not APTITUDE_CMD and p.get('upgrade', None) in [ 'full', 'safe', 'yes' ]:
module.fail_json(msg="Could not find aptitude. Please ensure it is installed.")
install_recommends = p['install_recommends']
dpkg_options = expand_dpkg_options(p['dpkg_options'])
try:
cache = apt.Cache()
if p['default_release']:
try:
apt_pkg.config['APT::Default-Release'] = p['default_release']
except AttributeError:
apt_pkg.Config['APT::Default-Release'] = p['default_release']
# reopen cache w/ modified config
cache.open(progress=None)
if p['update_cache']:
# Default is: always update the cache
cache_valid = False
if p['cache_valid_time']:
tdelta = datetime.timedelta(seconds=p['cache_valid_time'])
try:
mtime = os.stat(APT_UPDATE_SUCCESS_STAMP_PATH).st_mtime
except:
mtime = False
if mtime is False:
# Looks like the update-success-stamp is not available
# Fallback: Checking the mtime of the lists
try:
mtime = os.stat(APT_LISTS_PATH).st_mtime
except:
mtime = False
if mtime is False:
# No mtime could be read - looks like lists are not there
# We update the cache to be safe
cache_valid = False
else:
mtimestamp = datetime.datetime.fromtimestamp(mtime)
if mtimestamp + tdelta >= datetime.datetime.now():
# dont update the cache
# the old cache is less than cache_valid_time seconds old - so still valid
cache_valid = True
if cache_valid is not True:
cache.update()
cache.open(progress=None)
if not p['package'] and not p['upgrade'] and not p['deb']:
module.exit_json(changed=False)
force_yes = p['force']
if p['upgrade']:
upgrade(module, p['upgrade'], force_yes,
p['default_release'], dpkg_options)
if p['deb']:
if p['state'] != "installed":
module.fail_json(msg="deb only supports state=installed")
install_deb(module, p['deb'], cache,
install_recommends=install_recommends,
force=force_yes, dpkg_options=p['dpkg_options'])
packages = p['package']
latest = p['state'] == 'latest'
for package in packages:
if package.count('=') > 1:
module.fail_json(msg="invalid package spec: %s" % package)
if latest and '=' in package:
module.fail_json(msg='version number inconsistent with state=latest: %s' % package)
if p['state'] == 'latest':
result = install(module, packages, cache, upgrade=True,
default_release=p['default_release'],
install_recommends=install_recommends,
force=force_yes, dpkg_options=dpkg_options)
(success, retvals) = result
if success:
module.exit_json(**retvals)
else:
module.fail_json(**retvals)
elif p['state'] in [ 'installed', 'present' ]:
result = install(module, packages, cache, default_release=p['default_release'],
install_recommends=install_recommends,force=force_yes,
dpkg_options=dpkg_options)
(success, retvals) = result
if success:
module.exit_json(**retvals)
else:
module.fail_json(**retvals)
elif p['state'] in [ 'removed', 'absent' ]:
remove(module, packages, cache, p['purge'], dpkg_options)
except apt.cache.LockFailedException:
module.fail_json(msg="Failed to lock apt for exclusive operation")
# import module snippets
from ansible.module_utils.basic import *
main()
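
For reference, the cache_valid_time handling above amounts to comparing the mtime of the apt update stamp (falling back to the lists directory) against a time delta; cache_is_fresh is an illustrative restatement, not part of the module.

import datetime
import os

def cache_is_fresh(valid_seconds,
                   stamp_path='/var/lib/apt/periodic/update-success-stamp',
                   lists_path='/var/lib/apt/lists'):
    # Use the update-success-stamp if present, otherwise the lists directory.
    for path in (stamp_path, lists_path):
        try:
            mtime = os.stat(path).st_mtime
        except OSError:
            continue
        limit = datetime.timedelta(seconds=valid_seconds)
        return datetime.datetime.fromtimestamp(mtime) + limit >= datetime.datetime.now()
    # No mtime could be read, so treat the cache as stale.
    return False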

@@ -1,277 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2012, Jayson Vantuyl <jayson@aggressive.ly>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: apt_key
author: Jayson Vantuyl & others
version_added: "1.0"
short_description: Add or remove an apt key
description:
- Add or remove an I(apt) key, optionally downloading it
notes:
- doesn't download the key unless it really needs it
- as a sanity check, downloaded key id must match the one specified
- best practice is to specify the key id and the url
options:
id:
required: false
default: none
description:
- identifier of key
data:
required: false
default: none
description:
- keyfile contents
file:
required: false
default: none
description:
- keyfile path
keyring:
required: false
default: none
description:
- path to specific keyring file in /etc/apt/trusted.gpg.d
version_added: "1.3"
url:
required: false
default: none
description:
- url to retrieve key from.
keyserver:
version_added: "1.6"
required: false
default: none
description:
- keyserver to retrieve key from.
state:
required: false
choices: [ absent, present ]
default: present
description:
- used to specify if key is being added or revoked
validate_certs:
description:
- If C(no), SSL certificates for the target url will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
'''
EXAMPLES = '''
# Add an Apt signing key, uses whichever key is at the URL
- apt_key: url=https://ftp-master.debian.org/keys/archive-key-6.0.asc state=present
# Add an Apt signing key, will not download if present
- apt_key: id=473041FA url=https://ftp-master.debian.org/keys/archive-key-6.0.asc state=present
# Remove an Apt signing key, uses whichever key is at the URL
- apt_key: url=https://ftp-master.debian.org/keys/archive-key-6.0.asc state=absent
# Remove an Apt-specific signing key; a leading 0x is valid
- apt_key: id=0x473041FA state=absent
# Add a key from a file on the Ansible server
- apt_key: data="{{ lookup('file', 'apt.gpg') }}" state=present
# Add an Apt signing key to a specific keyring file
- apt_key: id=473041FA url=https://ftp-master.debian.org/keys/archive-key-6.0.asc keyring=/etc/apt/trusted.gpg.d/debian.gpg state=present
'''
# FIXME: standardize into module_common
from traceback import format_exc
from re import compile as re_compile
# FIXME: standardize into module_common
from distutils.spawn import find_executable
from os import environ
from sys import exc_info
import traceback
match_key = re_compile("^gpg:.*key ([0-9a-fA-F]+):.*$")
REQUIRED_EXECUTABLES=['gpg', 'grep', 'apt-key']
def check_missing_binaries(module):
missing = [e for e in REQUIRED_EXECUTABLES if not find_executable(e)]
if len(missing):
module.fail_json(msg="binaries are missing", names=missing)
def all_keys(module, keyring, short_format):
if keyring:
cmd = "apt-key --keyring %s adv --list-public-keys --keyid-format=long" % keyring
else:
cmd = "apt-key adv --list-public-keys --keyid-format=long"
(rc, out, err) = module.run_command(cmd)
results = []
lines = out.split('\n')
for line in lines:
if line.startswith("pub"):
tokens = line.split()
code = tokens[1]
(len_type, real_code) = code.split("/")
results.append(real_code)
if short_format:
results = shorten_key_ids(results)
return results
def shorten_key_ids(key_id_list):
"""
Takes a list of key ids, and converts them to the 'short' format,
by reducing them to their last 8 characters.
"""
short = []
for key in key_id_list:
short.append(key[-8:])
return short
def download_key(module, url):
# FIXME: move get_url code to common, allow for in-memory D/L, support proxies
# and reuse here
if url is None:
module.fail_json(msg="needed a URL but was not specified")
try:
rsp, info = fetch_url(module, url)
if info['status'] != 200:
module.fail_json(msg="Failed to download key at %s: %s" % (url, info['msg']))
return rsp.read()
except Exception:
module.fail_json(msg="error getting key id from url: %s" % url, traceback=format_exc())
def import_key(module, keyserver, key_id):
cmd = "apt-key adv --keyserver %s --recv %s" % (keyserver, key_id)
(rc, out, err) = module.run_command(cmd, check_rc=True)
return True
def add_key(module, keyfile, keyring, data=None):
if data is not None:
if keyring:
cmd = "apt-key --keyring %s add -" % keyring
else:
cmd = "apt-key add -"
(rc, out, err) = module.run_command(cmd, data=data, check_rc=True, binary_data=True)
else:
if keyring:
cmd = "apt-key --keyring %s add %s" % (keyring, keyfile)
else:
cmd = "apt-key add %s" % (keyfile)
(rc, out, err) = module.run_command(cmd, check_rc=True)
return True
def remove_key(module, key_id, keyring):
# FIXME: use module.run_command, fail at point of error and don't discard useful stdin/stdout
if keyring:
cmd = 'apt-key --keyring %s del %s' % (keyring, key_id)
else:
cmd = 'apt-key del %s' % key_id
(rc, out, err) = module.run_command(cmd, check_rc=True)
return True
def main():
module = AnsibleModule(
argument_spec=dict(
id=dict(required=False, default=None),
url=dict(required=False),
data=dict(required=False),
file=dict(required=False),
key=dict(required=False),
keyring=dict(required=False),
validate_certs=dict(default='yes', type='bool'),
keyserver=dict(required=False),
state=dict(required=False, choices=['present', 'absent'], default='present')
),
supports_check_mode=True
)
key_id = module.params['id']
url = module.params['url']
data = module.params['data']
filename = module.params['file']
keyring = module.params['keyring']
state = module.params['state']
keyserver = module.params['keyserver']
changed = False
if key_id:
try:
_ = int(key_id, 16)
if key_id.startswith('0x'):
key_id = key_id[2:]
key_id = key_id.upper()
except ValueError:
module.fail_json(msg="Invalid key_id", id=key_id)
# FIXME: I think we have a common facility for this, if not, want
check_missing_binaries(module)
short_format = (key_id is not None and len(key_id) == 8)
keys = all_keys(module, keyring, short_format)
return_values = {}
if state == 'present':
if key_id and key_id in keys:
module.exit_json(changed=False)
else:
if not filename and not data and not keyserver:
data = download_key(module, url)
if key_id and key_id in keys:
module.exit_json(changed=False)
else:
if module.check_mode:
module.exit_json(changed=True)
if filename:
add_key(module, filename, keyring)
elif keyserver:
import_key(module, keyserver, key_id)
else:
add_key(module, "-", keyring, data)
changed=False
keys2 = all_keys(module, keyring, short_format)
if len(keys) != len(keys2):
changed=True
if key_id and not key_id[-16:] in keys2:
module.fail_json(msg="key does not seem to have been added", id=key_id)
module.exit_json(changed=changed)
elif state == 'absent':
if not key_id:
module.fail_json(msg="key is required")
if key_id in keys:
if module.check_mode:
module.exit_json(changed=True)
if remove_key(module, key_id, keyring):
changed=True
else:
# FIXME: module.fail_json or exit-json immediately at point of failure
module.fail_json(msg="error removing key_id", **return_values)
module.exit_json(changed=changed, **return_values)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
main()
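
For reference, the key id validation in main() above can be restated as this small sketch; normalize_key_id is an illustrative name, not part of the module.

def normalize_key_id(key_id):
    # The id must be hexadecimal; an optional leading 0x is stripped and the
    # result is upper-cased before comparison against the keyring listing.
    int(key_id, 16)  # raises ValueError for a non-hex id
    if key_id.startswith('0x'):
        key_id = key_id[2:]
    return key_id.upper()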

@@ -1,446 +0,0 @@
#!/usr/bin/python
# encoding: utf-8
# (c) 2012, Matt Wright <matt@nobien.net>
# (c) 2013, Alexander Saltanov <asd@mokote.com>
# (c) 2014, Rutger Spiertz <rutger@kumina.nl>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: apt_repository
short_description: Add and remove APT repositories
description:
- Add or remove an APT repository in Ubuntu and Debian.
notes:
- This module works on Debian and Ubuntu and requires C(python-apt).
- This module supports Debian Squeeze (version 6) as well as its successors.
- This module treats Debian and Ubuntu distributions separately, so PPAs can only be used on Ubuntu machines.
options:
repo:
required: true
default: none
description:
- A source string for the repository.
state:
required: false
choices: [ "absent", "present" ]
default: "present"
description:
- A source string state.
mode:
required: false
default: 0644
description:
- The octal mode for newly created files in sources.list.d
version_added: "1.6"
update_cache:
description:
- Run the equivalent of C(apt-get update) when a change occurs. Cache updates are run after making changes.
required: false
default: "yes"
choices: [ "yes", "no" ]
validate_certs:
version_added: '1.8'
description:
- If C(no), SSL certificates for the target repo will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
author: Alexander Saltanov
version_added: "0.7"
requirements: [ python-apt ]
'''
EXAMPLES = '''
# Add specified repository into sources list.
apt_repository: repo='deb http://archive.canonical.com/ubuntu hardy partner' state=present
# Add source repository into sources list.
apt_repository: repo='deb-src http://archive.canonical.com/ubuntu hardy partner' state=present
# Remove specified repository from sources list.
apt_repository: repo='deb http://archive.canonical.com/ubuntu hardy partner' state=absent
# On Ubuntu target: add nginx stable repository from PPA and install its signing key.
# On Debian target: adding PPA is not available, so it will fail immediately.
apt_repository: repo='ppa:nginx/stable'
'''
import glob
import os
import re
import tempfile
try:
import apt
import apt_pkg
import aptsources.distro as aptsources_distro
distro = aptsources_distro.get_distro()
HAVE_PYTHON_APT = True
except ImportError:
distro = None
HAVE_PYTHON_APT = False
VALID_SOURCE_TYPES = ('deb', 'deb-src')
def install_python_apt(module):
if not module.check_mode:
apt_get_path = module.get_bin_path('apt-get')
if apt_get_path:
rc, so, se = module.run_command('%s update && %s install python-apt -y -q' % (apt_get_path, apt_get_path), use_unsafe_shell=True)
if rc == 0:
global apt, apt_pkg, aptsources_distro, distro, HAVE_PYTHON_APT
import apt
import apt_pkg
import aptsources.distro as aptsources_distro
distro = aptsources_distro.get_distro()
HAVE_PYTHON_APT = True
else:
module.fail_json(msg="Failed to auto-install python-apt. Error was: '%s'" % se.strip())
class InvalidSource(Exception):
pass
# Simple version of aptsources.sourceslist.SourcesList.
# No advanced logic and no backups inside.
class SourcesList(object):
def __init__(self):
self.files = {} # group sources by file
self.default_file = self._apt_cfg_file('Dir::Etc::sourcelist')
# read sources.list if it exists
if os.path.isfile(self.default_file):
self.load(self.default_file)
# read sources.list.d
for file in glob.iglob('%s/*.list' % self._apt_cfg_dir('Dir::Etc::sourceparts')):
self.load(file)
def __iter__(self):
'''Simple iterator to go over all sources. Empty, non-source, and other invalid lines will be skipped.'''
for file, sources in self.files.items():
for n, valid, enabled, source, comment in sources:
if valid:
yield file, n, enabled, source, comment
raise StopIteration
def _expand_path(self, filename):
if '/' in filename:
return filename
else:
return os.path.abspath(os.path.join(self._apt_cfg_dir('Dir::Etc::sourceparts'), filename))
def _suggest_filename(self, line):
def _cleanup_filename(s):
return '_'.join(re.sub('[^a-zA-Z0-9]', ' ', s).split())
def _strip_username_password(s):
if '@' in s:
s = s.split('@', 1)
s = s[-1]
return s
# Drop options and protocols.
line = re.sub('\[[^\]]+\]', '', line)
line = re.sub('\w+://', '', line)
# split line into valid keywords
parts = [part for part in line.split() if part not in VALID_SOURCE_TYPES]
# Drop usernames and passwords
parts[0] = _strip_username_password(parts[0])
return '%s.list' % _cleanup_filename(' '.join(parts[:1]))
def _parse(self, line, raise_if_invalid_or_disabled=False):
valid = False
enabled = True
source = ''
comment = ''
line = line.strip()
if line.startswith('#'):
enabled = False
line = line[1:]
# Check for another "#" in the line and treat a part after it as a comment.
i = line.find('#')
if i > 0:
comment = line[i+1:].strip()
line = line[:i]
# Split the source into chunks to make sure that it is a valid source spec.
# Duplicate whitespace in a valid source spec will be removed.
source = line.strip()
if source:
chunks = source.split()
if chunks[0] in VALID_SOURCE_TYPES:
valid = True
source = ' '.join(chunks)
if raise_if_invalid_or_disabled and (not valid or not enabled):
raise InvalidSource(line)
return valid, enabled, source, comment
@staticmethod
def _apt_cfg_file(filespec):
'''
Wrapper for `apt_pkg` module for running with Python 2.5
'''
try:
result = apt_pkg.config.find_file(filespec)
except AttributeError:
result = apt_pkg.Config.FindFile(filespec)
return result
@staticmethod
def _apt_cfg_dir(dirspec):
'''
Wrapper for `apt_pkg` module for running with Python 2.5
'''
try:
result = apt_pkg.config.find_dir(dirspec)
except AttributeError:
result = apt_pkg.Config.FindDir(dirspec)
return result
def load(self, file):
group = []
f = open(file, 'r')
for n, line in enumerate(f):
valid, enabled, source, comment = self._parse(line)
group.append((n, valid, enabled, source, comment))
self.files[file] = group
def save(self, module):
for filename, sources in self.files.items():
if sources:
d, fn = os.path.split(filename)
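# Write to a temporary file next to the target and atomically move it into
# place below, so a partially written sources file is never left behind.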
fd, tmp_path = tempfile.mkstemp(prefix=".%s-" % fn, dir=d)
# allow the user to override the default mode
this_mode = module.params['mode']
module.set_mode_if_different(tmp_path, this_mode, False)
f = os.fdopen(fd, 'w')
for n, valid, enabled, source, comment in sources:
chunks = []
if not enabled:
chunks.append('# ')
chunks.append(source)
if comment:
chunks.append(' # ')
chunks.append(comment)
chunks.append('\n')
line = ''.join(chunks)
try:
f.write(line)
except IOError, err:
module.fail_json(msg="Failed to write to file %s: %s" % (tmp_path, unicode(err)))
module.atomic_move(tmp_path, filename)
else:
del self.files[filename]
if os.path.exists(filename):
os.remove(filename)
def dump(self):
return '\n'.join([str(i) for i in self])
def modify(self, file, n, enabled=None, source=None, comment=None):
'''
This function is meant to be used with the iterator, so we don't care about invalid sources.
If source, enabled, or comment is None, original value from line ``n`` will be preserved.
'''
valid, enabled_old, source_old, comment_old = self.files[file][n][1:]
choice = lambda new, old: old if new is None else new
self.files[file][n] = (n, valid, choice(enabled, enabled_old), choice(source, source_old), choice(comment, comment_old))
def _add_valid_source(self, source_new, comment_new, file):
# We'll try to reuse a disabled source if we have one.
# If we have more than one matching entry, we will enable them all - no advanced logic, remember.
found = False
for filename, n, enabled, source, comment in self:
if source == source_new:
self.modify(filename, n, enabled=True)
found = True
if not found:
if file is None:
file = self.default_file
else:
file = self._expand_path(file)
if file not in self.files:
self.files[file] = []
files = self.files[file]
files.append((len(files), True, True, source_new, comment_new))
def add_source(self, line, comment='', file=None):
source = self._parse(line, raise_if_invalid_or_disabled=True)[2]
# Prefer separate files for new sources.
self._add_valid_source(source, comment, file=file or self._suggest_filename(source))
def _remove_valid_source(self, source):
# If we have more than one matching entry, we will remove them all (not comment out, remove!)
for filename, n, enabled, src, comment in self:
if source == src and enabled:
self.files[filename].pop(n)
def remove_source(self, line):
source = self._parse(line, raise_if_invalid_or_disabled=True)[2]
self._remove_valid_source(source)
class UbuntuSourcesList(SourcesList):
LP_API = 'https://launchpad.net/api/1.0/~%s/+archive/%s'
def __init__(self, module, add_ppa_signing_keys_callback=None):
self.module = module
self.add_ppa_signing_keys_callback = add_ppa_signing_keys_callback
super(UbuntuSourcesList, self).__init__()
def _get_ppa_info(self, owner_name, ppa_name):
lp_api = self.LP_API % (owner_name, ppa_name)
headers = dict(Accept='application/json')
response, info = fetch_url(self.module, lp_api, headers=headers)
if info['status'] != 200:
self.module.fail_json(msg="failed to fetch PPA information, error was: %s" % info['msg'])
return json.load(response)
def _expand_ppa(self, path):
ppa = path.split(':')[1]
ppa_owner = ppa.split('/')[0]
try:
ppa_name = ppa.split('/')[1]
except IndexError:
ppa_name = 'ppa'
line = 'deb http://ppa.launchpad.net/%s/%s/ubuntu %s main' % (ppa_owner, ppa_name, distro.codename)
return line, ppa_owner, ppa_name
def _key_already_exists(self, key_fingerprint):
rc, out, err = self.module.run_command('apt-key export %s' % key_fingerprint, check_rc=True)
return len(err) == 0
def add_source(self, line, comment='', file=None):
if line.startswith('ppa:'):
source, ppa_owner, ppa_name = self._expand_ppa(line)
if self.add_ppa_signing_keys_callback is not None:
info = self._get_ppa_info(ppa_owner, ppa_name)
if not self._key_already_exists(info['signing_key_fingerprint']):
command = ['apt-key', 'adv', '--recv-keys', '--keyserver', 'hkp://keyserver.ubuntu.com:80', info['signing_key_fingerprint']]
self.add_ppa_signing_keys_callback(command)
file = file or self._suggest_filename('%s_%s' % (line, distro.codename))
else:
source = self._parse(line, raise_if_invalid_or_disabled=True)[2]
file = file or self._suggest_filename(source)
self._add_valid_source(source, comment, file)
def remove_source(self, line):
if line.startswith('ppa:'):
source = self._expand_ppa(line)[0]
else:
source = self._parse(line, raise_if_invalid_or_disabled=True)[2]
self._remove_valid_source(source)
def get_add_ppa_signing_key_callback(module):
def _run_command(command):
module.run_command(command, check_rc=True)
if module.check_mode:
return None
else:
return _run_command
def main():
module = AnsibleModule(
argument_spec=dict(
repo=dict(required=True),
state=dict(choices=['present', 'absent'], default='present'),
mode=dict(required=False, default=0644),
update_cache = dict(aliases=['update-cache'], type='bool', default='yes'),
# this should not be needed, but exists as a failsafe
install_python_apt=dict(required=False, default="yes", type='bool'),
validate_certs = dict(default='yes', type='bool'),
),
supports_check_mode=True,
)
params = module.params
if params['install_python_apt'] and not HAVE_PYTHON_APT and not module.check_mode:
install_python_apt(module)
repo = module.params['repo']
state = module.params['state']
update_cache = module.params['update_cache']
sourceslist = None
if HAVE_PYTHON_APT:
if isinstance(distro, aptsources_distro.UbuntuDistribution):
sourceslist = UbuntuSourcesList(module,
add_ppa_signing_keys_callback=get_add_ppa_signing_key_callback(module))
elif HAVE_PYTHON_APT and \
isinstance(distro, aptsources_distro.DebianDistribution) or isinstance(distro, aptsources_distro.Distribution):
sourceslist = SourcesList()
else:
module.fail_json(msg='Module apt_repository supports only Debian and Ubuntu. ' + \
'You may be seeing this because python-apt is not installed, but you requested that it not be auto-installed')
sources_before = sourceslist.dump()
try:
if state == 'present':
sourceslist.add_source(repo)
elif state == 'absent':
sourceslist.remove_source(repo)
except InvalidSource, err:
module.fail_json(msg='Invalid repository string: %s' % unicode(err))
sources_after = sourceslist.dump()
changed = sources_before != sources_after
if not module.check_mode and changed:
try:
sourceslist.save(module)
if update_cache:
cache = apt.Cache()
cache.update()
except OSError, err:
module.fail_json(msg=unicode(err))
module.exit_json(changed=changed, repo=repo, state=state)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
main()

@ -1,172 +0,0 @@
#!/usr/bin/python -tt
# -*- coding: utf-8 -*-
# (c) 2013, Evgenii Terechkov
# Written by Evgenii Terechkov <evg@altlinux.org>
# Based on urpmi module written by Philippe Makowski <philippem@mageia.org>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: apt_rpm
short_description: apt_rpm package manager
description:
- Manages packages with I(apt-rpm). Both the low-level (I(rpm)) and high-level (I(apt-get)) package manager binaries are required.
version_added: "1.5"
options:
pkg:
description:
- name of package to install, upgrade or remove.
required: true
default: null
state:
description:
- Indicates the desired package state
required: false
default: present
choices: [ "absent", "present" ]
update_cache:
description:
- update the package database first with C(apt-get update).
required: false
default: no
choices: [ "yes", "no" ]
author: Evgenii Terechkov
notes: []
'''
EXAMPLES = '''
# install package foo
- apt_rpm: pkg=foo state=present
# remove package foo
- apt_rpm: pkg=foo state=absent
# remove packages foo and bar
- apt_rpm: pkg=foo,bar state=absent
# update the package database and install bar (bar will be upgraded if a newer version exists)
- apt_rpm: name=bar state=present update_cache=yes
'''
try:
import json
except ImportError:
import simplejson as json
import shlex
import os
import sys
APT_PATH="/usr/bin/apt-get"
RPM_PATH="/usr/bin/rpm"
def query_package(module, name):
# rpm -q returns 0 if the package is installed,
# 1 if it is not installed
rc = os.system("%s -q %s" % (RPM_PATH,name))
if rc == 0:
return True
else:
return False
def query_package_provides(module, name):
# rpm -q returns 0 if the package is installed,
# 1 if it is not installed
rc = os.system("%s -q --provides %s >/dev/null" % (RPM_PATH,name))
return rc == 0
def update_package_db(module):
rc = os.system("%s update" % APT_PATH)
if rc != 0:
module.fail_json(msg="could not update package db")
def remove_packages(module, packages):
remove_c = 0
# Using a for loop in case of error, so we can report the package that failed
for package in packages:
# Query the package first, to see if we even need to remove
if not query_package(module, package):
continue
rc = os.system("%s -y remove %s > /dev/null" % (APT_PATH,package))
if rc != 0:
module.fail_json(msg="failed to remove %s" % (package))
remove_c += 1
if remove_c > 0:
module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)
module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, pkgspec):
packages = ""
for package in pkgspec:
if not query_package_provides(module, package):
packages += "'%s' " % package
if len(packages) != 0:
cmd = ("%s -y install %s > /dev/null" % (APT_PATH, packages))
rc, out, err = module.run_command(cmd)
installed = True
for package in pkgspec:
if not query_package_provides(module, package):
installed = False
# apt-rpm always has an exit code of 0 if --force is used
if rc or not installed:
module.fail_json(msg="'apt-get -y install %s' failed: %s" % (packages, err))
else:
module.exit_json(changed=True, msg="%s present(s)" % packages)
else:
module.exit_json(changed=False)
def main():
module = AnsibleModule(
argument_spec = dict(
state = dict(default='installed', choices=['installed', 'removed', 'absent', 'present']),
update_cache = dict(default=False, aliases=['update-cache'], type='bool'),
package = dict(aliases=['pkg', 'name'], required=True)))
if not os.path.exists(APT_PATH) or not os.path.exists(RPM_PATH):
module.fail_json(msg="cannot find /usr/bin/apt-get and/or /usr/bin/rpm")
p = module.params
if p['update_cache']:
update_package_db(module)
packages = p['package'].split(',')
if p['state'] in [ 'installed', 'present' ]:
install_packages(module, packages)
elif p['state'] in [ 'removed', 'absent' ]:
remove_packages(module, packages)
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
main()

@ -1,188 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Matt Wright <matt@nobien.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import tempfile
import os.path
DOCUMENTATION = '''
---
module: easy_install
short_description: Installs Python libraries
description:
- Installs Python libraries, optionally in a I(virtualenv)
version_added: "0.7"
options:
name:
description:
- A Python library name
required: true
default: null
aliases: []
virtualenv:
description:
- an optional I(virtualenv) directory path to install into. If the
I(virtualenv) does not exist, it is created automatically
required: false
default: null
virtualenv_site_packages:
version_added: "1.1"
description:
- Whether the virtual environment will inherit packages from the
global site-packages directory. Note that if this setting is
changed on an already existing virtual environment it will not
have any effect; the environment must be deleted and newly
created.
required: false
default: "no"
choices: [ "yes", "no" ]
virtualenv_command:
version_added: "1.1"
description:
- The command to create the virtual environment with. For example
C(pyvenv), C(virtualenv), C(virtualenv2).
required: false
default: virtualenv
executable:
description:
- The explicit executable or a pathname to the executable to be used to
run easy_install for a specific version of Python installed in the
system. For example C(easy_install-3.3), if there are both Python 2.7
and 3.3 installations in the system and you want to run easy_install
for the Python 3.3 installation.
version_added: "1.3"
required: false
default: null
notes:
- Please note that the M(easy_install) module can only install Python
libraries. Thus this module is not able to remove libraries. It is
generally recommended to use the M(pip) module which you can first install
using M(easy_install).
- Also note that I(virtualenv) must be installed on the remote host if the
C(virtualenv) parameter is specified.
requirements: [ "virtualenv" ]
author: Matt Wright
'''
EXAMPLES = '''
# Examples from Ansible Playbooks
- easy_install: name=pip
# Install Bottle into the specified virtualenv.
- easy_install: name=bottle virtualenv=/webapps/myapp/venv
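# Illustrative only: run easy_install for a specific Python installation via the
# executable option documented above.
- easy_install: name=bottle executable=easy_install-3.3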
'''
def _is_package_installed(module, name, easy_install):
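# Heuristic: if the --dry-run output mentions 'Reading' or 'Downloading',
# easy_install would have to fetch the package, so it is treated as not installed.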
cmd = '%s --dry-run %s' % (easy_install, name)
rc, status_stdout, status_stderr = module.run_command(cmd)
return not ('Reading' in status_stdout or 'Downloading' in status_stdout)
def _get_easy_install(module, env=None, executable=None):
candidate_easy_inst_basenames = ['easy_install']
easy_install = None
if executable is not None:
if os.path.isabs(executable):
easy_install = executable
else:
candidate_easy_inst_basenames.insert(0, executable)
if easy_install is None:
if env is None:
opt_dirs = []
else:
# Try easy_install with the virtualenv directory first.
opt_dirs = ['%s/bin' % env]
for basename in candidate_easy_inst_basenames:
easy_install = module.get_bin_path(basename, False, opt_dirs)
if easy_install is not None:
break
# easy_install should have been found by now. The final call to
# get_bin_path will trigger fail_json.
if easy_install is None:
basename = candidate_easy_inst_basenames[0]
easy_install = module.get_bin_path(basename, True, opt_dirs)
return easy_install
def main():
arg_spec = dict(
name=dict(required=True),
virtualenv=dict(default=None, required=False),
virtualenv_site_packages=dict(default='no', type='bool'),
virtualenv_command=dict(default='virtualenv', required=False),
executable=dict(default='easy_install', required=False),
)
module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True)
name = module.params['name']
env = module.params['virtualenv']
executable = module.params['executable']
site_packages = module.params['virtualenv_site_packages']
virtualenv_command = module.params['virtualenv_command']
rc = 0
err = ''
out = ''
if env:
virtualenv = module.get_bin_path(virtualenv_command, True)
if not os.path.exists(os.path.join(env, 'bin', 'activate')):
if module.check_mode:
module.exit_json(changed=True)
command = '%s %s' % (virtualenv, env)
if site_packages:
command += ' --system-site-packages'
cwd = tempfile.gettempdir()
rc_venv, out_venv, err_venv = module.run_command(command, cwd=cwd)
rc += rc_venv
out += out_venv
err += err_venv
easy_install = _get_easy_install(module, env, executable)
cmd = None
changed = False
installed = _is_package_installed(module, name, easy_install)
if not installed:
if module.check_mode:
module.exit_json(changed=True)
cmd = '%s %s' % (easy_install, name)
rc_easy_inst, out_easy_inst, err_easy_inst = module.run_command(cmd)
rc += rc_easy_inst
out += out_easy_inst
err += err_easy_inst
changed = True
if rc != 0:
module.fail_json(msg=err, cmd=cmd)
module.exit_json(changed=changed, binary=easy_install,
name=name, virtualenv=env)
# import module snippets
from ansible.module_utils.basic import *
main()

@ -1,238 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Johan Wiren <johan.wiren.se@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: gem
short_description: Manage Ruby gems
description:
- Manage installation and uninstallation of Ruby gems.
version_added: "1.1"
options:
name:
description:
- The name of the gem to be managed.
required: true
state:
description:
- The desired state of the gem. C(latest) ensures that the latest version is installed.
required: false
choices: [present, absent, latest]
default: present
gem_source:
description:
- The path to a local gem used as installation source.
required: false
include_dependencies:
description:
- Whether to include dependencies or not.
required: false
choices: [ "yes", "no" ]
default: "yes"
repository:
description:
- The repository from which the gem will be installed
required: false
aliases: [source]
user_install:
description:
- Install the gem in the user's local gem cache, or for all users
required: false
default: "yes"
version_added: "1.3"
executable:
description:
- Override the path to the gem executable
required: false
version_added: "1.4"
version:
description:
- Version of the gem to be installed/removed.
required: false
pre_release:
description:
- Allow installation of pre-release versions of the gem.
required: false
default: "no"
version_added: "1.6"
author: Johan Wiren
'''
EXAMPLES = '''
# Installs version 1.0 of vagrant.
- gem: name=vagrant version=1.0 state=present
# Installs latest available version of rake.
- gem: name=rake state=latest
# Installs rake version 1.0 from a local gem on disk.
- gem: name=rake gem_source=/path/to/gems/rake-1.0.gem state=present
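# Illustrative only: install rake from an alternative repository into the user's
# local gem cache, using the repository and user_install options documented above.
- gem: name=rake repository=https://rubygems.org user_install=yes state=present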
'''
import re
def get_rubygems_path(module):
if module.params['executable']:
return module.params['executable'].split(' ')
else:
return [ module.get_bin_path('gem', True) ]
def get_rubygems_version(module):
cmd = get_rubygems_path(module) + [ '--version' ]
(rc, out, err) = module.run_command(cmd, check_rc=True)
match = re.match(r'^(\d+)\.(\d+)\.(\d+)', out)
if not match:
return None
return tuple(int(x) for x in match.groups())
def get_installed_versions(module, remote=False):
cmd = get_rubygems_path(module)
cmd.append('query')
if remote:
cmd.append('--remote')
if module.params['repository']:
cmd.extend([ '--source', module.params['repository'] ])
cmd.append('-n')
cmd.append('^%s$' % module.params['name'])
(rc, out, err) = module.run_command(cmd, check_rc=True)
installed_versions = []
for line in out.splitlines():
match = re.match(r"\S+\s+\((.+)\)", line)
if match:
versions = match.group(1)
for version in versions.split(', '):
installed_versions.append(version.split()[0])
return installed_versions
def exists(module):
if module.params['state'] == 'latest':
remoteversions = get_installed_versions(module, remote=True)
if remoteversions:
module.params['version'] = remoteversions[0]
installed_versions = get_installed_versions(module)
if module.params['version']:
if module.params['version'] in installed_versions:
return True
else:
if installed_versions:
return True
return False
def uninstall(module):
if module.check_mode:
return
cmd = get_rubygems_path(module)
cmd.append('uninstall')
if module.params['version']:
cmd.extend([ '--version', module.params['version'] ])
else:
cmd.append('--all')
cmd.append('--executable')
cmd.append(module.params['name'])
module.run_command(cmd, check_rc=True)
def install(module):
if module.check_mode:
return
ver = get_rubygems_version(module)
if ver:
major = ver[0]
else:
major = None
cmd = get_rubygems_path(module)
cmd.append('install')
if module.params['version']:
cmd.extend([ '--version', module.params['version'] ])
if module.params['repository']:
cmd.extend([ '--source', module.params['repository'] ])
if not module.params['include_dependencies']:
cmd.append('--ignore-dependencies')
else:
if major and major < 2:
cmd.append('--include-dependencies')
if module.params['user_install']:
cmd.append('--user-install')
else:
cmd.append('--no-user-install')
if module.params['pre_release']:
cmd.append('--pre')
cmd.append('--no-rdoc')
cmd.append('--no-ri')
cmd.append(module.params['gem_source'])
module.run_command(cmd, check_rc=True)
def main():
module = AnsibleModule(
argument_spec = dict(
executable = dict(required=False, type='str'),
gem_source = dict(required=False, type='str'),
include_dependencies = dict(required=False, default=True, type='bool'),
name = dict(required=True, type='str'),
repository = dict(required=False, aliases=['source'], type='str'),
state = dict(required=False, default='present', choices=['present','absent','latest'], type='str'),
user_install = dict(required=False, default=True, type='bool'),
pre_release = dict(required=False, default=False, type='bool'),
version = dict(required=False, type='str'),
),
supports_check_mode = True,
mutually_exclusive = [ ['gem_source','repository'], ['gem_source','version'] ],
)
if module.params['version'] and module.params['state'] == 'latest':
module.fail_json(msg="Cannot specify version when state=latest")
if module.params['gem_source'] and module.params['state'] == 'latest':
module.fail_json(msg="Cannot maintain state=latest when installing from local source")
if not module.params['gem_source']:
module.params['gem_source'] = module.params['name']
changed = False
if module.params['state'] in [ 'present', 'latest']:
if not exists(module):
install(module)
changed = True
elif module.params['state'] == 'absent':
if exists(module):
uninstall(module)
changed = True
result = {}
result['name'] = module.params['name']
result['state'] = module.params['state']
if module.params['version']:
result['version'] = module.params['version']
result['changed'] = changed
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
main()

@ -1,356 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Matt Wright <matt@nobien.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import tempfile
import os
DOCUMENTATION = '''
---
module: pip
short_description: Manages Python library dependencies.
description:
- "Manage Python library dependencies. To use this module, one of the following keys is required: C(name)
or C(requirements)."
version_added: "0.7"
options:
name:
description:
- The name of a Python library to install or the url of the remote package.
required: false
default: null
version:
description:
- The version number to install of the Python library specified in the I(name) parameter
required: false
default: null
requirements:
description:
- The path to a pip requirements file
required: false
default: null
virtualenv:
description:
- An optional path to a I(virtualenv) directory to install into
required: false
default: null
virtualenv_site_packages:
version_added: "1.0"
description:
- Whether the virtual environment will inherit packages from the
global site-packages directory. Note that if this setting is
changed on an already existing virtual environment it will not
have any effect, the environment must be deleted and newly
created.
required: false
default: "no"
choices: [ "yes", "no" ]
virtualenv_command:
version_added: "1.1"
description:
- The command or a pathname to the command to create the virtual
environment with. For example C(pyvenv), C(virtualenv),
C(virtualenv2), C(~/bin/virtualenv), C(/usr/local/bin/virtualenv).
required: false
default: virtualenv
state:
description:
- The state of the module
required: false
default: present
choices: [ "present", "absent", "latest" ]
extra_args:
description:
- Extra arguments passed to pip.
required: false
default: null
version_added: "1.0"
chdir:
description:
- cd into this directory before running the command
version_added: "1.3"
required: false
default: null
executable:
description:
- The explicit executable or a pathname to the executable to be used to
run pip for a specific version of Python installed in the system. For
example C(pip-3.3), if there are both Python 2.7 and 3.3 installations
in the system and you want to run pip for the Python 3.3 installation.
version_added: "1.3"
required: false
default: null
notes:
- Please note that virtualenv (U(http://www.virtualenv.org/)) must be installed on the remote host if the virtualenv parameter is specified.
requirements: [ "virtualenv", "pip" ]
author: Matt Wright
'''
EXAMPLES = '''
# Install (Bottle) python package.
- pip: name=bottle
# Install (Bottle) python package on version 0.11.
- pip: name=bottle version=0.11
# Install (MyApp) using one of the remote protocols (bzr+,hg+,git+,svn+). You do not have to supply the '-e' option in extra_args.
- pip: name='svn+http://myrepo/svn/MyApp#egg=MyApp'
# Install (Bottle) into the specified (virtualenv), inheriting none of the globally installed modules
- pip: name=bottle virtualenv=/my_app/venv
# Install (Bottle) into the specified (virtualenv), inheriting globally installed modules
- pip: name=bottle virtualenv=/my_app/venv virtualenv_site_packages=yes
# Install (Bottle) into the specified (virtualenv), using Python 2.7
- pip: name=bottle virtualenv=/my_app/venv virtualenv_command=virtualenv-2.7
# Install specified python requirements.
- pip: requirements=/my_app/requirements.txt
# Install specified python requirements in indicated (virtualenv).
- pip: requirements=/my_app/requirements.txt virtualenv=/my_app/venv
# Install specified python requirements and custom Index URL.
- pip: requirements=/my_app/requirements.txt extra_args='-i https://example.com/pypi/simple'
# Install (Bottle) for Python 3.3 specifically, using the 'pip-3.3' executable.
- pip: name=bottle executable=pip-3.3
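# Illustrative only: keep a package at its latest release, or remove it, using the
# state choices documented above.
- pip: name=bottle state=latest
- pip: name=bottle state=absent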
'''
def _get_cmd_options(module, cmd):
thiscmd = cmd + " --help"
rc, stdout, stderr = module.run_command(thiscmd)
if rc != 0:
module.fail_json(msg="Could not get output from %s: %s" % (thiscmd, stdout + stderr))
words = stdout.strip().split()
cmd_options = [ x for x in words if x.startswith('--') ]
return cmd_options
def _get_full_name(name, version=None):
if version is None:
resp = name
else:
resp = name + '==' + version
return resp
def _is_present(name, version, installed_pkgs):
for pkg in installed_pkgs:
if '==' not in pkg:
continue
[pkg_name, pkg_version] = pkg.split('==')
if pkg_name == name and (version is None or version == pkg_version):
return True
return False
def _get_pip(module, env=None, executable=None):
# On Debian and Ubuntu, pip is pip.
# On Fedora18 and up, pip is python-pip.
# On Fedora17 and below, CentOS and RedHat 6 and 5, pip is pip-python.
# On Fedora, CentOS, and RedHat, the exception is in the virtualenv.
# There, pip is just pip.
candidate_pip_basenames = ['pip', 'python-pip', 'pip-python']
pip = None
if executable is not None:
if os.path.isabs(executable):
pip = executable
else:
# If you define your own executable that executable should be the only candidate.
candidate_pip_basenames = [executable]
if pip is None:
if env is None:
opt_dirs = []
else:
# Try pip with the virtualenv directory first.
opt_dirs = ['%s/bin' % env]
for basename in candidate_pip_basenames:
pip = module.get_bin_path(basename, False, opt_dirs)
if pip is not None:
break
# pip should have been found by now. The final call to get_bin_path will
# trigger fail_json.
if pip is None:
basename = candidate_pip_basenames[0]
pip = module.get_bin_path(basename, True, opt_dirs)
return pip
def _fail(module, cmd, out, err):
msg = ''
if out:
msg += "stdout: %s" % (out, )
if err:
msg += "\nstderr: %s" % (err, )
module.fail_json(cmd=cmd, msg=msg)
def main():
state_map = dict(
present='install',
absent='uninstall -y',
latest='install -U',
)
module = AnsibleModule(
argument_spec=dict(
state=dict(default='present', choices=state_map.keys()),
name=dict(default=None, required=False),
version=dict(default=None, required=False, type='str'),
requirements=dict(default=None, required=False),
virtualenv=dict(default=None, required=False),
virtualenv_site_packages=dict(default='no', type='bool'),
virtualenv_command=dict(default='virtualenv', required=False),
use_mirrors=dict(default='yes', type='bool'),
extra_args=dict(default=None, required=False),
chdir=dict(default=None, required=False),
executable=dict(default=None, required=False),
),
required_one_of=[['name', 'requirements']],
mutually_exclusive=[['name', 'requirements']],
supports_check_mode=True
)
state = module.params['state']
name = module.params['name']
version = module.params['version']
requirements = module.params['requirements']
extra_args = module.params['extra_args']
chdir = module.params['chdir']
if state == 'latest' and version is not None:
module.fail_json(msg='version is incompatible with state=latest')
err = ''
out = ''
env = module.params['virtualenv']
virtualenv_command = module.params['virtualenv_command']
if env:
env = os.path.expanduser(env)
virtualenv = os.path.expanduser(virtualenv_command)
if os.path.basename(virtualenv) == virtualenv:
virtualenv = module.get_bin_path(virtualenv_command, True)
if not os.path.exists(os.path.join(env, 'bin', 'activate')):
if module.check_mode:
module.exit_json(changed=True)
if module.params['virtualenv_site_packages']:
cmd = '%s --system-site-packages %s' % (virtualenv, env)
else:
cmd_opts = _get_cmd_options(module, virtualenv)
if '--no-site-packages' in cmd_opts:
cmd = '%s --no-site-packages %s' % (virtualenv, env)
else:
cmd = '%s %s' % (virtualenv, env)
this_dir = tempfile.gettempdir()
if chdir:
this_dir = os.path.join(this_dir, chdir)
rc, out_venv, err_venv = module.run_command(cmd, cwd=this_dir)
out += out_venv
err += err_venv
if rc != 0:
_fail(module, cmd, out, err)
pip = _get_pip(module, env, module.params['executable'])
cmd = '%s %s' % (pip, state_map[state])
# If there's a virtualenv we want things we install to be able to use other
# installations that exist as binaries within this virtualenv. Example: we
# install cython and then gevent -- gevent needs to use the cython binary,
# not just a python package that will be found by calling the right python.
# So if there's a virtualenv, we add that bin/ to the beginning of the PATH
# in run_command by setting path_prefix here.
path_prefix = None
if env:
path_prefix="/".join(pip.split('/')[:-1])
# Automatically apply -e option to extra_args when source is a VCS url. VCS
# includes those beginning with svn+, git+, hg+ or bzr+
if name:
if name.startswith('svn+') or name.startswith('git+') or \
name.startswith('hg+') or name.startswith('bzr+'):
args_list = [] # used if extra_args is not used at all
if extra_args:
args_list = extra_args.split(' ')
if '-e' not in args_list:
args_list.append('-e')
# Ok, we will reconstruct the option string
extra_args = ' '.join(args_list)
if extra_args:
cmd += ' %s' % extra_args
if name:
cmd += ' %s' % _get_full_name(name, version)
elif requirements:
cmd += ' -r %s' % requirements
this_dir = tempfile.gettempdir()
if chdir:
this_dir = os.path.join(this_dir, chdir)
if module.check_mode:
if env or extra_args or requirements or state == 'latest' or not name:
module.exit_json(changed=True)
elif name.startswith('svn+') or name.startswith('git+') or \
name.startswith('hg+') or name.startswith('bzr+'):
module.exit_json(changed=True)
freeze_cmd = '%s freeze' % pip
rc, out_pip, err_pip = module.run_command(freeze_cmd, cwd=this_dir)
if rc != 0:
module.exit_json(changed=True)
out += out_pip
err += err_pip
is_present = _is_present(name, version, out.split())
changed = (state == 'present' and not is_present) or (state == 'absent' and is_present)
module.exit_json(changed=changed, cmd=freeze_cmd, stdout=out, stderr=err)
rc, out_pip, err_pip = module.run_command(cmd, path_prefix=path_prefix, cwd=this_dir)
out += out_pip
err += err_pip
if rc == 1 and state == 'absent' and 'not installed' in out_pip:
pass # rc is 1 when attempting to uninstall non-installed package
elif rc != 0:
_fail(module, cmd, out, err)
if state == 'absent':
changed = 'Successfully uninstalled' in out_pip
else:
changed = 'Successfully installed' in out_pip
module.exit_json(changed=changed, cmd=cmd, name=name, version=version,
state=state, requirements=requirements, virtualenv=env, stdout=out, stderr=err)
# import module snippets
from ansible.module_utils.basic import *
main()

@ -1,396 +0,0 @@
#!/usr/bin/python
DOCUMENTATION = '''
---
module: redhat_subscription
short_description: Manage Red Hat Network registration and subscriptions using the C(subscription-manager) command
description:
- Manage registration and subscription to the Red Hat Network entitlement platform.
version_added: "1.2"
author: James Laska
notes:
- In order to register a system, subscription-manager requires either a username and password, or an activationkey.
requirements:
- subscription-manager
options:
state:
description:
- whether to register and subscribe (C(present)), or unregister (C(absent)) a system
required: false
choices: [ "present", "absent" ]
default: "present"
username:
description:
- Red Hat Network username
required: False
default: null
password:
description:
- Red Hat Network password
required: False
default: null
server_hostname:
description:
- Specify an alternative Red Hat Network server
required: False
default: Current value from C(/etc/rhsm/rhsm.conf) is the default
server_insecure:
description:
- Allow traffic over insecure http
required: False
default: Current value from C(/etc/rhsm/rhsm.conf) is the default
rhsm_baseurl:
description:
- Specify CDN baseurl
required: False
default: Current value from C(/etc/rhsm/rhsm.conf) is the default
autosubscribe:
description:
- Upon successful registration, auto-consume available subscriptions
required: False
default: False
activationkey:
description:
- supply an activation key for use with registration
required: False
default: null
pool:
description:
- Specify a subscription pool name to consume. Regular expressions accepted.
required: False
default: '^$'
'''
EXAMPLES = '''
# Register as user (joe_user) with password (somepass) and auto-subscribe to available content.
- redhat_subscription: state=present username=joe_user password=somepass autosubscribe=true
# Register with activationkey (1-222333444) and consume subscriptions matching
# the names (Red Hat Enterprise Server) and (Red Hat Virtualization)
- redhat_subscription: state=present
activationkey=1-222333444
pool='^(Red Hat Enterprise Server|Red Hat Virtualization)$'
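# Illustrative only: unregister the system, using state=absent as documented above.
- redhat_subscription: state=absent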
'''
import os
import re
import types
import ConfigParser
import shlex
class RegistrationBase(object):
def __init__(self, module, username=None, password=None):
self.module = module
self.username = username
self.password = password
def configure(self):
raise NotImplementedError("Must be implemented by a sub-class")
def enable(self):
# Remove any existing redhat.repo
redhat_repo = '/etc/yum.repos.d/redhat.repo'
if os.path.isfile(redhat_repo):
os.unlink(redhat_repo)
def register(self):
raise NotImplementedError("Must be implemented by a sub-class")
def unregister(self):
raise NotImplementedError("Must be implemented by a sub-class")
def unsubscribe(self):
raise NotImplementedError("Must be implemented by a sub-class")
def update_plugin_conf(self, plugin, enabled=True):
plugin_conf = '/etc/yum/pluginconf.d/%s.conf' % plugin
if os.path.isfile(plugin_conf):
cfg = ConfigParser.ConfigParser()
cfg.read([plugin_conf])
if enabled:
cfg.set('main', 'enabled', 1)
else:
cfg.set('main', 'enabled', 0)
fd = open(plugin_conf, 'rwa+')
cfg.write(fd)
fd.close()
def subscribe(self, **kwargs):
raise NotImplementedError("Must be implemented by a sub-class")
class Rhsm(RegistrationBase):
def __init__(self, module, username=None, password=None):
RegistrationBase.__init__(self, module, username, password)
self.config = self._read_config()
self.module = module
def _read_config(self, rhsm_conf='/etc/rhsm/rhsm.conf'):
'''
Load RHSM configuration from /etc/rhsm/rhsm.conf.
Returns:
* ConfigParser object
'''
# Read RHSM defaults ...
cp = ConfigParser.ConfigParser()
cp.read(rhsm_conf)
# Add support for specifying a default value w/o having to standup some configuration
# Yeah, I know this should be subclassed ... but, oh well
def get_option_default(self, key, default=''):
sect, opt = key.split('.', 1)
if self.has_section(sect) and self.has_option(sect, opt):
return self.get(sect, opt)
else:
return default
cp.get_option = types.MethodType(get_option_default, cp, ConfigParser.ConfigParser)
return cp
def enable(self):
'''
Enable the system to receive updates from subscription-manager.
This involves updating affected yum plugins and removing any
conflicting yum repositories.
'''
RegistrationBase.enable(self)
self.update_plugin_conf('rhnplugin', False)
self.update_plugin_conf('subscription-manager', True)
def configure(self, **kwargs):
'''
Configure the system as directed for registration with RHN
Raises:
* Exception - if error occurs while running command
'''
args = ['subscription-manager', 'config']
# Pass supplied **kwargs as parameters to subscription-manager. Ignore
# non-configuration parameters and replace '_' with '.'. For example,
# 'server_hostname' becomes '--server.hostname'.
for k,v in kwargs.items():
if re.search(r'^(server|rhsm)_', k):
args.append('--%s=%s' % (k.replace('_','.'), v))
self.module.run_command(args, check_rc=True)
@property
def is_registered(self):
'''
Determine whether the current system is registered to RHN.
Returns:
* Boolean - whether the current system is currently registered to
RHN.
'''
# Quick version...
if False:
return os.path.isfile('/etc/pki/consumer/cert.pem') and \
os.path.isfile('/etc/pki/consumer/key.pem')
args = ['subscription-manager', 'identity']
rc, stdout, stderr = self.module.run_command(args, check_rc=False)
if rc == 0:
return True
else:
return False
def register(self, username, password, autosubscribe, activationkey):
'''
Register the current system to the provided RHN server
Raises:
* Exception - if error occurs while running command
'''
args = ['subscription-manager', 'register']
# Generate command arguments
if activationkey:
args.extend(['--activationkey', activationkey])
else:
if autosubscribe:
args.append('--autosubscribe')
if username:
args.extend(['--username', username])
if password:
args.extend(['--password', password])
rc, stderr, stdout = self.module.run_command(args, check_rc=True)
def unsubscribe(self):
'''
Unsubscribe a system from all subscribed channels
Raises:
* Exception - if error occurs while running command
'''
args = ['subscription-manager', 'unsubscribe', '--all']
rc, stderr, stdout = self.module.run_command(args, check_rc=True)
def unregister(self):
'''
Unregister a currently registered system
Raises:
* Exception - if error occurs while running command
'''
args = ['subscription-manager', 'unregister']
rc, stderr, stdout = self.module.run_command(args, check_rc=True)
def subscribe(self, regexp):
'''
Subscribe current system to available pools matching the specified
regular expression
Raises:
* Exception - if error occurs while running command
'''
# Available pools ready for subscription
available_pools = RhsmPools(self.module)
for pool in available_pools.filter(regexp):
pool.subscribe()
class RhsmPool(object):
'''
Convenience class for housing subscription information
'''
def __init__(self, module, **kwargs):
self.module = module
for k,v in kwargs.items():
setattr(self, k, v)
def __str__(self):
return str(self.__getattribute__('_name'))
def subscribe(self):
args = "subscription-manager subscribe --pool %s" % self.PoolId
rc, stdout, stderr = self.module.run_command(args, check_rc=True)
if rc == 0:
return True
else:
return False
class RhsmPools(object):
"""
This class is used for manipulating pool subscriptions with RHSM
"""
def __init__(self, module):
self.module = module
self.products = self._load_product_list()
def __iter__(self):
return self.products.__iter__()
def _load_product_list(self):
"""
Loads the list of all available pools for the system into a data structure
"""
args = "subscription-manager list --available"
rc, stdout, stderr = self.module.run_command(args, check_rc=True)
products = []
for line in stdout.split('\n'):
# Remove leading+trailing whitespace
line = line.strip()
# An empty line implies the end of an output group
if len(line) == 0:
continue
# If a colon ':' is found, parse
elif ':' in line:
(key, value) = line.split(':',1)
key = key.strip().replace(" ", "") # To unify
value = value.strip()
if key in ['ProductName', 'SubscriptionName']:
# Remember the name for later processing
products.append(RhsmPool(self.module, _name=value, key=value))
elif products:
# Associate value with most recently recorded product
products[-1].__setattr__(key, value)
# FIXME - log some warning?
#else:
# warnings.warn("Unhandled subscription key/value: %s/%s" % (key,value))
return products
def filter(self, regexp='^$'):
'''
Return a list of RhsmPools whose name matches the provided regular expression
'''
r = re.compile(regexp)
for product in self.products:
if r.search(product._name):
yield product
def main():
# Load RHSM configuration from file
rhn = Rhsm(None)
module = AnsibleModule(
argument_spec = dict(
state = dict(default='present', choices=['present', 'absent']),
username = dict(default=None, required=False),
password = dict(default=None, required=False),
server_hostname = dict(default=rhn.config.get_option('server.hostname'), required=False),
server_insecure = dict(default=rhn.config.get_option('server.insecure'), required=False),
rhsm_baseurl = dict(default=rhn.config.get_option('rhsm.baseurl'), required=False),
autosubscribe = dict(default=False, type='bool'),
activationkey = dict(default=None, required=False),
pool = dict(default='^$', required=False, type='str'),
)
)
rhn.module = module
state = module.params['state']
username = module.params['username']
password = module.params['password']
server_hostname = module.params['server_hostname']
server_insecure = module.params['server_insecure']
rhsm_baseurl = module.params['rhsm_baseurl']
autosubscribe = module.params['autosubscribe'] == True
activationkey = module.params['activationkey']
pool = module.params['pool']
# Ensure system is registered
if state == 'present':
# Check for missing parameters ...
if not (activationkey or username or password):
module.fail_json(msg="Missing arguments, must supply an activationkey (%s) or username (%s) and password (%s)" % (activationkey, username, password))
if not activationkey and not (username and password):
module.fail_json(msg="Missing arguments, If registering without an activationkey, must supply username or password")
# Register system
if rhn.is_registered:
module.exit_json(changed=False, msg="System already registered.")
else:
try:
rhn.enable()
rhn.configure(**module.params)
rhn.register(username, password, autosubscribe, activationkey)
rhn.subscribe(pool)
except Exception, e:
module.fail_json(msg="Failed to register with '%s': %s" % (server_hostname, e))
else:
module.exit_json(changed=True, msg="System successfully registered to '%s'." % server_hostname)
# Ensure system is *not* registered
if state == 'absent':
if not rhn.is_registered:
module.exit_json(changed=False, msg="System already unregistered.")
else:
try:
rhn.unsubscribe()
rhn.unregister()
except Exception, e:
module.fail_json(msg="Failed to unregister: %s" % e)
else:
module.exit_json(changed=True, msg="System successfully unregistered from %s." % server_hostname)
# import module snippets
from ansible.module_utils.basic import *
main()

@ -1,169 +0,0 @@
#!/usr/bin/python
# (c) Vincent Van de Kussen
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: rhn_channel
short_description: Adds or removes Red Hat software channels
description:
- Adds or removes Red Hat software channels
version_added: "1.1"
author: Vincent Van der Kussen
notes:
- this module fetches the system id from RHN.
requirements:
- none
options:
name:
description:
- name of the software channel
required: true
default: null
sysname:
description:
- name of the system as it is known in RHN/Satellite
required: true
default: null
state:
description:
- whether the channel should be present or not
required: false
default: present
url:
description:
- The full url to the RHN/Satellite api
required: true
user:
description:
- RHN/Satellite user
required: true
password:
description:
- "the user's password"
required: true
'''
EXAMPLES = '''
- rhn_channel: name=rhel-x86_64-server-v2vwin-6 sysname=server01 url=https://rhn.redhat.com/rpc/api user=rhnuser password=guessme
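# Illustrative only: remove the same channel again by setting state=absent, as documented above.
- rhn_channel: name=rhel-x86_64-server-v2vwin-6 sysname=server01 url=https://rhn.redhat.com/rpc/api user=rhnuser password=guessme state=absent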
'''
import xmlrpclib
from operator import itemgetter
import re
# ------------------------------------------------------- #
def get_systemid(client, session, sysname):
systems = client.system.listUserSystems(session)
for system in systems:
if system.get('name') == sysname:
idres = system.get('id')
idd = int(idres)
return idd
# ------------------------------------------------------- #
# unused:
#
#def get_localsystemid():
# f = open("/etc/sysconfig/rhn/systemid", "r")
# content = f.read()
# loc_id = re.search(r'\b(ID-)(\d{10})' ,content)
# return loc_id.group(2)
# ------------------------------------------------------- #
def subscribe_channels(channels, client, session, sysname, sys_id):
c = base_channels(client, session, sys_id)
c.append(channels)
return client.channel.software.setSystemChannels(session, sys_id, c)
# ------------------------------------------------------- #
def unsubscribe_channels(channels, client, session, sysname, sys_id):
c = base_channels(client, session, sys_id)
c.remove(channels)
return client.channel.software.setSystemChannels(session, sys_id, c)
# ------------------------------------------------------- #
def base_channels(client, session, sys_id):
basechan = client.channel.software.listSystemChannels(session, sys_id)
try:
chans = [item['label'] for item in basechan]
except KeyError:
chans = [item['channel_label'] for item in basechan]
return chans
# ------------------------------------------------------- #
def main():
module = AnsibleModule(
argument_spec = dict(
state = dict(default='present', choices=['present', 'absent']),
name = dict(required=True),
sysname = dict(required=True),
url = dict(required=True),
user = dict(required=True),
password = dict(required=True, aliases=['pwd']),
)
# supports_check_mode=True
)
state = module.params['state']
channelname = module.params['name']
systname = module.params['sysname']
saturl = module.params['url']
user = module.params['user']
password = module.params['password']
#initialize connection
client = xmlrpclib.Server(saturl, verbose=0)
session = client.auth.login(user, password)
# get systemid
sys_id = get_systemid(client, session, systname)
# get channels for system
chans = base_channels(client, session, sys_id)
if state == 'present':
if channelname in chans:
module.exit_json(changed=False, msg="Channel %s already exists" % channelname)
else:
subscribe_channels(channelname, client, session, systname, sys_id)
module.exit_json(changed=True, msg="Channel %s added" % channelname)
if state == 'absent':
if channelname not in chans:
module.exit_json(changed=False, msg="Not subscribed to channel %s." % channelname)
else:
unsubscribe_channels(channelname, client, session, systname, sys_id)
module.exit_json(changed=True, msg="Channel %s removed" % channelname)
client.auth.logout(session)
# import module snippets
from ansible.module_utils.basic import *
main()

@ -1,336 +0,0 @@
#!/usr/bin/python
# (c) James Laska
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: rhn_register
short_description: Manage Red Hat Network registration using the C(rhnreg_ks) command
description:
- Manage registration to the Red Hat Network.
version_added: "1.2"
author: James Laska
notes:
- In order to register a system, rhnreg_ks requires either a username and password, or an activationkey.
requirements:
- rhnreg_ks
options:
state:
description:
- whether to register (C(present)), or unregister (C(absent)) a system
required: false
choices: [ "present", "absent" ]
default: "present"
username:
description:
- Red Hat Network username
required: False
default: null
password:
description:
- Red Hat Network password
required: False
default: null
server_url:
description:
- Specify an alternative Red Hat Network server URL
required: False
default: Current value of I(serverURL) from C(/etc/sysconfig/rhn/up2date) is the default
activationkey:
description:
- supply an activation key for use with registration
required: False
default: null
channels:
description:
- Optionally specify a list of comma-separated channels to subscribe to upon successful registration.
required: false
default: []
'''
EXAMPLES = '''
# Unregister system from RHN.
- rhn_register: state=absent username=joe_user password=somepass
# Register as user (joe_user) with password (somepass) and auto-subscribe to available content.
- rhn_register: state=present username=joe_user password=somepass
# Register with activationkey (1-222333444) and enable extended update support.
- rhn_register: state=present activationkey=1-222333444 enable_eus=true
# Register as user (joe_user) with password (somepass) against a satellite
# server specified by (server_url).
- rhn_register: >
state=present
username=joe_user
password=somepass
server_url=https://xmlrpc.my.satellite/XMLRPC
# Register as user (joe_user) with password (somepass) and enable
# channels (rhel-x86_64-server-6-foo-1) and (rhel-x86_64-server-6-bar-1).
- rhn_register: state=present username=joe_user
password=somepass
channels=rhel-x86_64-server-6-foo-1,rhel-x86_64-server-6-bar-1
'''
import sys
import types
import xmlrpclib
import urlparse
# Attempt to import rhn client tools
sys.path.insert(0, '/usr/share/rhn')
try:
import up2date_client
import up2date_client.config
except ImportError, e:
module.fail_json(msg="Unable to import up2date_client. Is 'rhn-client-tools' installed?\n%s" % e)
# INSERT REDHAT SNIPPETS
from ansible.module_utils.redhat import *
# INSERT COMMON SNIPPETS
from ansible.module_utils.basic import *
class Rhn(RegistrationBase):
def __init__(self, username=None, password=None):
RegistrationBase.__init__(self, username, password)
self.config = self.load_config()
def load_config(self):
'''
Read configuration from /etc/sysconfig/rhn/up2date
'''
self.config = up2date_client.config.initUp2dateConfig()
# Add support for specifying a default value w/o having to stand up some
# configuration. Yeah, I know this should be subclassed ... but, oh
# well
def get_option_default(self, key, default=''):
# ignore pep8 W601 errors for this line
# setting this to use 'in' does not work in the rhn library
if self.has_key(key):
return self[key]
else:
return default
self.config.get_option = types.MethodType(get_option_default, self.config, up2date_client.config.Config)
return self.config
@property
def hostname(self):
'''
Return the non-xmlrpc RHN hostname. This is a convenience method
used for displaying a more readable RHN hostname.
Returns: str
'''
url = urlparse.urlparse(self.config['serverURL'])
return url[1].replace('xmlrpc.','')
@property
def systemid(self):
systemid = None
xpath_str = "//member[name='system_id']/value/string"
if os.path.isfile(self.config['systemIdPath']):
fd = open(self.config['systemIdPath'], 'r')
xml_data = fd.read()
fd.close()
# Ugh, xml parsing time ...
# First, try parsing with libxml2 ...
if systemid is None:
try:
import libxml2
doc = libxml2.parseDoc(xml_data)
ctxt = doc.xpathNewContext()
systemid = ctxt.xpathEval(xpath_str)[0].content
doc.freeDoc()
ctxt.xpathFreeContext()
except ImportError:
pass
# m-kay, let's try with lxml now ...
if systemid is None:
try:
from lxml import etree
root = etree.fromstring(xml_data)
systemid = root.xpath(xpath_str)[0].text
except ImportError:
pass
# Strip the 'ID-' prefix
if systemid is not None and systemid.startswith('ID-'):
systemid = systemid[3:]
return int(systemid)
@property
def is_registered(self):
'''
Determine whether the current system is registered.
Returns: True|False
'''
return os.path.isfile(self.config['systemIdPath'])
def configure(self, server_url):
'''
Configure system for registration
'''
self.config.set('serverURL', server_url)
self.config.save()
def enable(self):
'''
Prepare the system for RHN registration. This includes ...
* enabling the rhnplugin yum plugin
* disabling the subscription-manager yum plugin
'''
RegistrationBase.enable(self)
self.update_plugin_conf('rhnplugin', True)
self.update_plugin_conf('subscription-manager', False)
def register(self, enable_eus=False, activationkey=None):
'''
Register system to RHN. If enable_eus=True, extended update
support will be requested.
'''
register_cmd = "/usr/sbin/rhnreg_ks --username='%s' --password='%s' --force" % (self.username, self.password)
if self.module.params.get('server_url', None):
register_cmd += " --serverUrl=%s" % self.module.params.get('server_url')
if enable_eus:
register_cmd += " --use-eus-channel"
if activationkey is not None:
register_cmd += " --activationkey '%s'" % activationkey
# FIXME - support --profilename
# FIXME - support --systemorgid
rc, stdout, stderr = self.module.run_command(register_cmd, check_rc=True, use_unsafe_shell=True)
def api(self, method, *args):
'''
Convenience RPC wrapper
'''
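# Example (as used below): self.api('channel.software.listSystemChannels', self.systemid)
# The first call logs in and caches the XML-RPC session; later calls reuse it.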
if not hasattr(self, 'server') or self.server is None:
if self.hostname != 'rhn.redhat.com':
url = "https://%s/rpc/api" % self.hostname
else:
url = "https://xmlrpc.%s/rpc/api" % self.hostname
self.server = xmlrpclib.Server(url, verbose=0)
self.session = self.server.auth.login(self.username, self.password)
func = getattr(self.server, method)
return func(self.session, *args)
def unregister(self):
'''
Unregister a previously registered system
'''
# Initiate RPC connection
self.api('system.deleteSystems', [self.systemid])
# Remove systemid file
os.unlink(self.config['systemIdPath'])
def subscribe(self, channels=[]):
if len(channels) <= 0:
return
current_channels = self.api('channel.software.listSystemChannels', self.systemid)
new_channels = [item['channel_label'] for item in current_channels]
new_channels.extend(channels)
return self.api('channel.software.setSystemChannels', self.systemid, new_channels)
def _subscribe(self, channels=[]):
'''
Subscribe to requested yum repositories using 'rhn-channel' command
'''
rhn_channel_cmd = "rhn-channel --user='%s' --password='%s'" % (self.username, self.password)
rc, stdout, stderr = self.module.run_command(rhn_channel_cmd + " --available-channels", check_rc=True)
# Enable requested repoid's
for wanted_channel in channels:
# Each requested channel is treated as a regexp and matched against the
# available channels; channels with no match are silently skipped.
for available_channel in stdout.rstrip().split('\n'): # .rstrip() drops the trailing \n, which would otherwise leave an empty last element
if re.search(wanted_channel, available_channel):
rc, stdout, stderr = self.module.run_command(rhn_channel_cmd + " --add --channel=%s" % available_channel, check_rc=True)
def main():
# Read system RHN configuration
rhn = Rhn()
module = AnsibleModule(
argument_spec = dict(
state = dict(default='present', choices=['present', 'absent']),
username = dict(default=None, required=False),
password = dict(default=None, required=False),
server_url = dict(default=rhn.config.get_option('serverURL'), required=False),
activationkey = dict(default=None, required=False),
enable_eus = dict(default=False, type='bool'),
channels = dict(default=[], type='list'),
)
)
state = module.params['state']
rhn.username = module.params['username']
rhn.password = module.params['password']
rhn.configure(module.params['server_url'])
activationkey = module.params['activationkey']
channels = module.params['channels']
rhn.module = module
# Ensure system is registered
if state == 'present':
# Check for missing parameters ...
if not (activationkey or rhn.username or rhn.password):
module.fail_json(msg="Missing arguments, must supply an activationkey (%s) or username (%s) and password (%s)" % (activationkey, rhn.username, rhn.password))
if not activationkey and not (rhn.username and rhn.password):
module.fail_json(msg="Missing arguments, If registering without an activationkey, must supply username or password")
# Register system
if rhn.is_registered:
module.exit_json(changed=False, msg="System already registered.")
else:
try:
rhn.enable()
rhn.register(module.params['enable_eus'] == True, activationkey)
rhn.subscribe(channels)
except Exception, e:
module.fail_json(msg="Failed to register with '%s': %s" % (rhn.hostname, e))
module.exit_json(changed=True, msg="System successfully registered to '%s'." % rhn.hostname)
# Ensure system is *not* registered
if state == 'absent':
if not rhn.is_registered:
module.exit_json(changed=False, msg="System already unregistered.")
else:
try:
rhn.unregister()
except Exception, e:
module.fail_json(msg="Failed to unregister: %s" % e)
module.exit_json(changed=True, msg="System successfully unregistered from %s." % rhn.hostname)
main()

@ -1,206 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Ansible module to import third party repo keys to your rpm db
# (c) 2013, Héctor Acosta <hector.acosta@gazzang.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: rpm_key
author: Hector Acosta <hector.acosta@gazzang.com>
short_description: Adds or removes a gpg key from the rpm db
description:
- Adds or removes (rpm --import) a gpg key to your rpm database.
version_added: "1.3"
options:
key:
required: true
default: null
aliases: []
description:
- Key that will be modified. Can be a url, a file, or a keyid if the key already exists in the database.
state:
required: false
default: "present"
choices: [present, absent]
description:
- Whether the key will be imported or removed from the rpm db.
validate_certs:
description:
- If C(no) and the C(key) is a url starting with https, SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
'''
EXAMPLES = '''
# Example action to import a key from a url
- rpm_key: state=present key=http://apt.sw.be/RPM-GPG-KEY.dag.txt
# Example action to import a key from a file
- rpm_key: state=present key=/path/to/key.gpg
# Example action to ensure a key is not present in the db
- rpm_key: state=absent key=DEADB33F
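# Example action to import a key over https without validating the certificate
# (the url below is illustrative; see the validate_certs option above)
- rpm_key: state=present key=https://internal.example.com/RPM-GPG-KEY validate_certs=no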
'''
import syslog
import os.path
import re
import tempfile
def is_pubkey(string):
"""Verifies if string is a pubkey"""
pgp_regex = ".*?(-----BEGIN PGP PUBLIC KEY BLOCK-----.*?-----END PGP PUBLIC KEY BLOCK-----).*"
return re.match(pgp_regex, string, re.DOTALL)
class RpmKey:
def __init__(self, module):
self.syslogging = False
# If the key is a url, we need to check whether it is already present to stay
# idempotent; to do that, we need the keyid, which we can get from the armor.
keyfile = None
should_cleanup_keyfile = False
self.module = module
self.rpm = self.module.get_bin_path('rpm', True)
state = module.params['state']
key = module.params['key']
if '://' in key:
keyfile = self.fetch_key(key)
keyid = self.getkeyid(keyfile)
should_cleanup_keyfile = True
elif self.is_keyid(key):
keyid = key
elif os.path.isfile(key):
keyfile = key
keyid = self.getkeyid(keyfile)
else:
self.module.fail_json(msg="Not a valid key %s" % key)
keyid = self.normalize_keyid(keyid)
if state == 'present':
if self.is_key_imported(keyid):
module.exit_json(changed=False)
else:
if not keyfile:
self.module.fail_json(msg="When importing a key, a valid file must be given")
self.import_key(keyfile, dryrun=module.check_mode)
if should_cleanup_keyfile:
self.module.cleanup(keyfile)
module.exit_json(changed=True)
else:
if self.is_key_imported(keyid):
self.drop_key(keyid, dryrun=module.check_mode)
module.exit_json(changed=True)
else:
module.exit_json(changed=False)
def fetch_key(self, url):
"""Downloads a key from url, returns a valid path to a gpg key"""
try:
rsp, info = fetch_url(self.module, url)
key = rsp.read()
if not is_pubkey(key):
self.module.fail_json(msg="Not a public key: %s" % url)
tmpfd, tmpname = tempfile.mkstemp()
tmpfile = os.fdopen(tmpfd, "w+b")
tmpfile.write(key)
tmpfile.close()
return tmpname
except urllib2.URLError, e:
self.module.fail_json(msg=str(e))
def normalize_keyid(self, keyid):
"""Ensure a keyid doesn't have a leading 0x, has leading or trailing whitespace, and make sure is lowercase"""
ret = keyid.strip().lower()
if ret.startswith('0x'):
return ret[2:]
elif ret.startswith('0X'):
return ret[2:]
else:
return ret
def getkeyid(self, keyfile):
gpg = self.module.get_bin_path('gpg', True)
stdout, stderr = self.execute_command([gpg, '--no-tty', '--batch', '--with-colons', '--fixed-list-mode', '--list-packets', keyfile])
for line in stdout.splitlines():
line = line.strip()
if line.startswith(':signature packet:'):
# We want just the last 8 characters of the keyid
keyid = line.split()[-1].strip()[8:]
return keyid
self.module.fail_json(msg="Unexpected gpg output")
def is_keyid(self, keystr):
"""Verifies if a key, as provided by the user is a keyid"""
return re.match('(0x)?[0-9a-f]{8}', keystr, flags=re.IGNORECASE)
def execute_command(self, cmd):
if self.syslogging:
syslog.openlog('ansible-%s' % os.path.basename(__file__))
syslog.syslog(syslog.LOG_NOTICE, 'Command %s' % '|'.join(cmd))
rc, stdout, stderr = self.module.run_command(cmd)
if rc != 0:
self.module.fail_json(msg=stderr)
return stdout, stderr
def is_key_imported(self, keyid):
stdout, stderr = self.execute_command([self.rpm, '-qa', 'gpg-pubkey'])
for line in stdout.splitlines():
line = line.strip()
if not line:
continue
match = re.match('gpg-pubkey-([0-9a-f]+)-([0-9a-f]+)', line)
if not match:
self.module.fail_json(msg="rpm returned unexpected output [%s]" % line)
else:
if keyid == match.group(1):
return True
return False
def import_key(self, keyfile, dryrun=False):
if not dryrun:
self.execute_command([self.rpm, '--import', keyfile])
def drop_key(self, key, dryrun=False):
if not dryrun:
self.execute_command([self.rpm, '--erase', '--allmatches', "gpg-pubkey-%s" % key])
def main():
module = AnsibleModule(
argument_spec = dict(
state=dict(default='present', choices=['present', 'absent'], type='str'),
key=dict(required=True, type='str'),
validate_certs=dict(default='yes', type='bool'),
),
supports_check_mode=True
)
RpmKey(module)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
main()

@ -1,838 +0,0 @@
#!/usr/bin/python -tt
# -*- coding: utf-8 -*-
# (c) 2012, Red Hat, Inc
# Written by Seth Vidal <skvidal at fedoraproject.org>
# (c) 2014, Epic Games, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import traceback
import os
import yum
try:
from yum.misc import find_unfinished_transactions, find_ts_remaining
from rpmUtils.miscutils import splitFilename
transaction_helpers = True
except:
transaction_helpers = False
DOCUMENTATION = '''
---
module: yum
version_added: historical
short_description: Manages packages with the I(yum) package manager
description:
- Installs, upgrades, removes, and lists packages and groups with the I(yum) package manager.
options:
name:
description:
- "Package name, or package specifier with version, like C(name-1.0). When using state=latest, this can be '*' which means run: yum -y update. You can also pass a url or a local path to a rpm file."
required: true
default: null
aliases: []
list:
description:
- Various (non-idempotent) commands for usage with C(/usr/bin/ansible) and I(not) playbooks. See examples.
required: false
default: null
state:
description:
- Whether to install (C(present), C(latest)), or remove (C(absent)) a package.
required: false
choices: [ "present", "latest", "absent" ]
default: "present"
enablerepo:
description:
- I(Repoid) of repositories to enable for the install/update operation.
These repos will not persist beyond the transaction.
When specifying multiple repos, separate them with a ",".
required: false
version_added: "0.9"
default: null
aliases: []
disablerepo:
description:
- I(Repoid) of repositories to disable for the install/update operation.
These repos will not persist beyond the transaction.
When specifying multiple repos, separate them with a ",".
required: false
version_added: "0.9"
default: null
aliases: []
conf_file:
description:
- The remote yum configuration file to use for the transaction.
required: false
version_added: "0.6"
default: null
aliases: []
disable_gpg_check:
description:
- Whether to disable the GPG checking of signatures of packages being
installed. Has an effect only if state is I(present) or I(latest).
required: false
version_added: "1.2"
default: "no"
choices: ["yes", "no"]
aliases: []
notes: []
# informational: requirements for nodes
requirements: [ yum, rpm ]
author: Seth Vidal
'''
EXAMPLES = '''
- name: install the latest version of Apache
yum: name=httpd state=latest
- name: remove the Apache package
yum: name=httpd state=absent
- name: install the latest version of Apache from the testing repo
yum: name=httpd enablerepo=testing state=present
- name: upgrade all packages
yum: name=* state=latest
- name: install the nginx rpm from a remote repo
yum: name=http://nginx.org/packages/centos/6/noarch/RPMS/nginx-release-centos-6-0.el6.ngx.noarch.rpm state=present
- name: install nginx rpm from a local file
yum: name=/usr/local/src/nginx-release-centos-6-0.el6.ngx.noarch.rpm state=present
- name: install the 'Development tools' package group
yum: name="@Development tools" state=present
'''
def_qf = "%{name}-%{version}-%{release}.%{arch}"
repoquery='/usr/bin/repoquery'
if not os.path.exists(repoquery):
repoquery = None
yumbin='/usr/bin/yum'
import syslog
def log(msg):
syslog.openlog('ansible-yum', 0, syslog.LOG_USER)
syslog.syslog(syslog.LOG_NOTICE, msg)
def yum_base(conf_file=None, cachedir=False):
my = yum.YumBase()
my.preconf.debuglevel=0
my.preconf.errorlevel=0
if conf_file and os.path.exists(conf_file):
my.preconf.fn = conf_file
if cachedir or os.geteuid() != 0:
if hasattr(my, 'setCacheDir'):
my.setCacheDir()
else:
cachedir = yum.misc.getCacheDir()
my.repos.setCacheDir(cachedir)
my.conf.cache = 0
return my
def install_yum_utils(module):
if not module.check_mode:
yum_path = module.get_bin_path('yum')
if yum_path:
rc, so, se = module.run_command('%s -y install yum-utils' % yum_path)
if rc == 0:
this_path = module.get_bin_path('repoquery')
global repoquery
repoquery = this_path
def po_to_nevra(po):
if hasattr(po, 'ui_nevra'):
return po.ui_nevra
else:
return '%s-%s-%s.%s' % (po.name, po.version, po.release, po.arch)
def is_installed(module, repoq, pkgspec, conf_file, qf=def_qf, en_repos=[], dis_repos=[], is_pkg=False):
if not repoq:
pkgs = []
try:
my = yum_base(conf_file)
for rid in en_repos:
my.repos.enableRepo(rid)
for rid in dis_repos:
my.repos.disableRepo(rid)
e,m,u = my.rpmdb.matchPackageNames([pkgspec])
pkgs = e + m
if not pkgs:
pkgs.extend(my.returnInstalledPackagesByDep(pkgspec))
except Exception, e:
module.fail_json(msg="Failure talking to yum: %s" % e)
return [ po_to_nevra(p) for p in pkgs ]
else:
cmd = repoq + ["--disablerepo=*", "--pkgnarrow=installed", "--qf", qf, pkgspec]
rc,out,err = module.run_command(cmd)
if not is_pkg:
cmd = repoq + ["--disablerepo=*", "--pkgnarrow=installed", "--qf", qf, "--whatprovides", pkgspec]
rc2,out2,err2 = module.run_command(cmd)
else:
rc2,out2,err2 = (0, '', '')
if rc == 0 and rc2 == 0:
out += out2
return [ p for p in out.split('\n') if p.strip() ]
else:
module.fail_json(msg='Error from repoquery: %s: %s' % (cmd, err + err2))
return []
def is_available(module, repoq, pkgspec, conf_file, qf=def_qf, en_repos=[], dis_repos=[]):
if not repoq:
pkgs = []
try:
my = yum_base(conf_file)
for rid in en_repos:
my.repos.enableRepo(rid)
for rid in dis_repos:
my.repos.disableRepo(rid)
e,m,u = my.pkgSack.matchPackageNames([pkgspec])
pkgs = e + m
if not pkgs:
pkgs.extend(my.returnPackagesByDep(pkgspec))
except Exception, e:
module.fail_json(msg="Failure talking to yum: %s" % e)
return [ po_to_nevra(p) for p in pkgs ]
else:
myrepoq = list(repoq)
for repoid in dis_repos:
r_cmd = ['--disablerepo', repoid]
myrepoq.extend(r_cmd)
for repoid in en_repos:
r_cmd = ['--enablerepo', repoid]
myrepoq.extend(r_cmd)
cmd = myrepoq + ["--qf", qf, pkgspec]
rc,out,err = module.run_command(cmd)
if rc == 0:
return [ p for p in out.split('\n') if p.strip() ]
else:
module.fail_json(msg='Error from repoquery: %s: %s' % (cmd, err))
return []
def is_update(module, repoq, pkgspec, conf_file, qf=def_qf, en_repos=[], dis_repos=[]):
if not repoq:
retpkgs = []
pkgs = []
updates = []
try:
my = yum_base(conf_file)
for rid in en_repos:
my.repos.enableRepo(rid)
for rid in dis_repos:
my.repos.disableRepo(rid)
pkgs = my.returnPackagesByDep(pkgspec) + my.returnInstalledPackagesByDep(pkgspec)
if not pkgs:
e,m,u = my.pkgSack.matchPackageNames([pkgspec])
pkgs = e + m
updates = my.doPackageLists(pkgnarrow='updates').updates
except Exception, e:
module.fail_json(msg="Failure talking to yum: %s" % e)
for pkg in pkgs:
if pkg in updates:
retpkgs.append(pkg)
return set([ po_to_nevra(p) for p in retpkgs ])
else:
myrepoq = list(repoq)
for repoid in dis_repos:
r_cmd = ['--disablerepo', repoid]
myrepoq.extend(r_cmd)
for repoid in en_repos:
r_cmd = ['--enablerepo', repoid]
myrepoq.extend(r_cmd)
cmd = myrepoq + ["--pkgnarrow=updates", "--qf", qf, pkgspec]
rc,out,err = module.run_command(cmd)
if rc == 0:
return set([ p for p in out.split('\n') if p.strip() ])
else:
module.fail_json(msg='Error from repoquery: %s: %s' % (cmd, err))
return []
def what_provides(module, repoq, req_spec, conf_file, qf=def_qf, en_repos=[], dis_repos=[]):
if not repoq:
pkgs = []
try:
my = yum_base(conf_file)
for rid in en_repos:
my.repos.enableRepo(rid)
for rid in dis_repos:
my.repos.disableRepo(rid)
pkgs = my.returnPackagesByDep(req_spec) + my.returnInstalledPackagesByDep(req_spec)
if not pkgs:
e,m,u = my.pkgSack.matchPackageNames([req_spec])
pkgs.extend(e)
pkgs.extend(m)
e,m,u = my.rpmdb.matchPackageNames([req_spec])
pkgs.extend(e)
pkgs.extend(m)
except Exception, e:
module.fail_json(msg="Failure talking to yum: %s" % e)
return set([ po_to_nevra(p) for p in pkgs ])
else:
myrepoq = list(repoq)
for repoid in dis_repos:
r_cmd = ['--disablerepo', repoid]
myrepoq.extend(r_cmd)
for repoid in en_repos:
r_cmd = ['--enablerepo', repoid]
myrepoq.extend(r_cmd)
cmd = myrepoq + ["--qf", qf, "--whatprovides", req_spec]
rc,out,err = module.run_command(cmd)
cmd = myrepoq + ["--qf", qf, req_spec]
rc2,out2,err2 = module.run_command(cmd)
if rc == 0 and rc2 == 0:
out += out2
pkgs = set([ p for p in out.split('\n') if p.strip() ])
if not pkgs:
pkgs = is_installed(module, repoq, req_spec, conf_file, qf=qf)
return pkgs
else:
module.fail_json(msg='Error from repoquery: %s: %s' % (cmd, err + err2))
return []
def transaction_exists(pkglist):
"""
checks the package list to see if any packages are
involved in an incomplete transaction
"""
conflicts = []
if not transaction_helpers:
return conflicts
# first, we create a list of the package 'nvreas'
# so we can compare the pieces later more easily
pkglist_nvreas = []
for pkg in pkglist:
pkglist_nvreas.append(splitFilename(pkg))
# next, we build the list of packages that are
# contained within an unfinished transaction
unfinished_transactions = find_unfinished_transactions()
for trans in unfinished_transactions:
steps = find_ts_remaining(trans)
for step in steps:
# the action is install/erase/etc., but we only
# care about the package spec contained in the step
(action, step_spec) = step
(n,v,r,e,a) = splitFilename(step_spec)
# and see if that spec is in the list of packages
# requested for installation/updating
for pkg in pkglist_nvreas:
# if the name and arch match, we're going to assume
# this package is part of a pending transaction
# the label is just for display purposes
label = "%s-%s" % (n,a)
if n == pkg[0] and a == pkg[4]:
if label not in conflicts:
conflicts.append("%s-%s" % (n,a))
break
return conflicts
def local_nvra(module, path):
"""return nvra of a local rpm passed in"""
cmd = ['/bin/rpm', '-qp' ,'--qf',
'%{name}-%{version}-%{release}.%{arch}\n', path ]
rc, out, err = module.run_command(cmd)
if rc != 0:
return None
nvra = out.split('\n')[0]
return nvra
def pkg_to_dict(pkgstr):
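# pkgstr is one line of repoquery output in the qf format used by list_stuff():
# "%{name}|%{epoch}|%{version}|%{release}|%{arch}|%{repoid}"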
if pkgstr.strip():
n,e,v,r,a,repo = pkgstr.split('|')
else:
return {'error_parsing': pkgstr}
d = {
'name':n,
'arch':a,
'epoch':e,
'release':r,
'version':v,
'repo':repo,
'nevra': '%s:%s-%s-%s.%s' % (e,n,v,r,a)
}
if repo == 'installed':
d['yumstate'] = 'installed'
else:
d['yumstate'] = 'available'
return d
def repolist(module, repoq, qf="%{repoid}"):
cmd = repoq + ["--qf", qf, "-a"]
rc,out,err = module.run_command(cmd)
ret = []
if rc == 0:
ret = set([ p for p in out.split('\n') if p.strip() ])
return ret
def list_stuff(module, conf_file, stuff):
qf = "%{name}|%{epoch}|%{version}|%{release}|%{arch}|%{repoid}"
repoq = [repoquery, '--show-duplicates', '--plugins', '--quiet', '-q']
if conf_file and os.path.exists(conf_file):
repoq += ['-c', conf_file]
if stuff == 'installed':
return [ pkg_to_dict(p) for p in is_installed(module, repoq, '-a', conf_file, qf=qf) if p.strip() ]
elif stuff == 'updates':
return [ pkg_to_dict(p) for p in is_update(module, repoq, '-a', conf_file, qf=qf) if p.strip() ]
elif stuff == 'available':
return [ pkg_to_dict(p) for p in is_available(module, repoq, '-a', conf_file, qf=qf) if p.strip() ]
elif stuff == 'repos':
return [ dict(repoid=name, state='enabled') for name in repolist(module, repoq) if name.strip() ]
else:
return [ pkg_to_dict(p) for p in is_installed(module, repoq, stuff, conf_file, qf=qf) + is_available(module, repoq, stuff, conf_file, qf=qf) if p.strip() ]
def install(module, items, repoq, yum_basecmd, conf_file, en_repos, dis_repos):
res = {}
res['results'] = []
res['msg'] = ''
res['rc'] = 0
res['changed'] = False
for spec in items:
pkg = None
# check if pkgspec is installed (if possible for idempotence)
# localpkg
if spec.endswith('.rpm') and '://' not in spec:
# get the pkg name-v-r.arch
if not os.path.exists(spec):
res['msg'] += "No Package file matching '%s' found on system" % spec
module.fail_json(**res)
nvra = local_nvra(module, spec)
# look for them in the rpmdb
if is_installed(module, repoq, nvra, conf_file, en_repos=en_repos, dis_repos=dis_repos):
# if they are there, skip it
continue
pkg = spec
# URL
elif '://' in spec:
pkg = spec
#groups :(
elif spec.startswith('@'):
# complete wild ass guess b/c it's a group
pkg = spec
# range requires or file-requires or pkgname :(
else:
# most common case is the pkg is already installed and done
# short circuit all the bs - and search for it as a pkg in is_installed
# if you find it then we're done
if not set(['*','?']).intersection(set(spec)):
pkgs = is_installed(module, repoq, spec, conf_file, en_repos=en_repos, dis_repos=dis_repos, is_pkg=True)
if pkgs:
res['results'].append('%s providing %s is already installed' % (pkgs[0], spec))
continue
# look up what pkgs provide this
pkglist = what_provides(module, repoq, spec, conf_file, en_repos=en_repos, dis_repos=dis_repos)
if not pkglist:
res['msg'] += "No Package matching '%s' found available, installed or updated" % spec
module.fail_json(**res)
# if any of the packages are involved in a transaction, fail now
# so that we don't hang on the yum operation later
conflicts = transaction_exists(pkglist)
if len(conflicts) > 0:
res['msg'] += "The following packages have pending transactions: %s" % ", ".join(conflicts)
module.fail_json(**res)
# if any of them are installed
# then nothing to do
found = False
for this in pkglist:
if is_installed(module, repoq, this, conf_file, en_repos=en_repos, dis_repos=dis_repos, is_pkg=True):
found = True
res['results'].append('%s providing %s is already installed' % (this, spec))
break
# if the version of the pkg you have installed is not in ANY repo, but there are
# other versions in the repos (both higher and lower) then the previous checks won't work.
# so we check one more time. This really only works for pkgname - not for file provides or virt provides
# but virt provides should be all caught in what_provides on its own.
# highly irritating
if not found:
if is_installed(module, repoq, spec, conf_file, en_repos=en_repos, dis_repos=dis_repos):
found = True
res['results'].append('package providing %s is already installed' % (spec))
if found:
continue
# if not - then pass in the spec as what to install
# we could get here if nothing provides it but that's not
# the error we're catching here
pkg = spec
cmd = yum_basecmd + ['install', pkg]
if module.check_mode:
module.exit_json(changed=True)
changed = True
rc, out, err = module.run_command(cmd)
# Fail on invalid urls:
if (rc == 1 and '://' in spec and ('No package %s available.' % spec in out or 'Cannot open: %s. Skipping.' % spec in err)):
err = 'Package at %s could not be installed' % spec
module.fail_json(changed=False,msg=err,rc=1)
elif (rc != 0 and 'Nothing to do' in err) or 'Nothing to do' in out:
# avoid failing in the 'Nothing To Do' case
# this may happen with a URL spec.
# for an already installed group,
# we get rc = 0 and 'Nothing to do' in out, not in err.
rc = 0
err = ''
out = '%s: Nothing to do' % spec
changed = False
res['rc'] += rc
res['results'].append(out)
res['msg'] += err
# FIXME - if we did an install - go and check the rpmdb to see if it actually installed
# look for the pkg in rpmdb
# look for the pkg via obsoletes
# accumulate any changes
res['changed'] |= changed
module.exit_json(**res)
def remove(module, items, repoq, yum_basecmd, conf_file, en_repos, dis_repos):
res = {}
res['results'] = []
res['msg'] = ''
res['changed'] = False
res['rc'] = 0
for pkg in items:
is_group = False
# group remove - this is doom on a stick
if pkg.startswith('@'):
is_group = True
else:
if not is_installed(module, repoq, pkg, conf_file, en_repos=en_repos, dis_repos=dis_repos):
res['results'].append('%s is not installed' % pkg)
continue
# run an actual yum transaction
cmd = yum_basecmd + ["remove", pkg]
if module.check_mode:
module.exit_json(changed=True)
rc, out, err = module.run_command(cmd)
res['rc'] += rc
res['results'].append(out)
res['msg'] += err
# compile the results into one batch. If anything is changed
# then mark changed
# at the end - if we've end up failed then fail out of the rest
# of the process
# at this point we should check to see if the pkg is no longer present
if not is_group: # we can't sensibly check for a group being uninstalled reliably
# look to see if the pkg shows up from is_installed. If it doesn't
if not is_installed(module, repoq, pkg, conf_file, en_repos=en_repos, dis_repos=dis_repos):
res['changed'] = True
else:
module.fail_json(**res)
if rc != 0:
module.fail_json(**res)
module.exit_json(**res)
def latest(module, items, repoq, yum_basecmd, conf_file, en_repos, dis_repos):
res = {}
res['results'] = []
res['msg'] = ''
res['changed'] = False
res['rc'] = 0
for spec in items:
pkg = None
basecmd = 'update'
cmd = ''
# groups, again
if spec.startswith('@'):
pkg = spec
elif spec == '*': #update all
# use check-update to see if there is any need
rc,out,err = module.run_command(yum_basecmd + ['check-update'])
if rc == 100:
cmd = yum_basecmd + [basecmd]
else:
res['results'].append('All packages up to date')
continue
# dep/pkgname - find it
else:
if is_installed(module, repoq, spec, conf_file, en_repos=en_repos, dis_repos=dis_repos):
basecmd = 'update'
else:
basecmd = 'install'
pkglist = what_provides(module, repoq, spec, conf_file, en_repos=en_repos, dis_repos=dis_repos)
if not pkglist:
res['msg'] += "No Package matching '%s' found available, installed or updated" % spec
module.fail_json(**res)
nothing_to_do = True
for this in pkglist:
if basecmd == 'install' and is_available(module, repoq, this, conf_file, en_repos=en_repos, dis_repos=dis_repos):
nothing_to_do = False
break
if basecmd == 'update' and is_update(module, repoq, this, conf_file, en_repos=en_repos, dis_repos=dis_repos):
nothing_to_do = False
break
if nothing_to_do:
res['results'].append("All packages providing %s are up to date" % spec)
continue
# if any of the packages are involved in a transaction, fail now
# so that we don't hang on the yum operation later
conflicts = transaction_exists(pkglist)
if len(conflicts) > 0:
res['msg'] += "The following packages have pending transactions: %s" % ", ".join(conflicts)
module.fail_json(**res)
pkg = spec
if not cmd:
cmd = yum_basecmd + [basecmd, pkg]
if module.check_mode:
return module.exit_json(changed=True)
rc, out, err = module.run_command(cmd)
res['rc'] += rc
res['results'].append(out)
res['msg'] += err
# FIXME if it is - update it and check to see if it applied
# check to see if there is no longer an update available for the pkgspec
if rc:
res['failed'] = True
else:
res['changed'] = True
module.exit_json(**res)
def ensure(module, state, pkgspec, conf_file, enablerepo, disablerepo,
disable_gpg_check):
# take multiple args comma separated
items = pkgspec.split(',')
# need debug level 2 to get 'Nothing to do' for groupinstall.
yum_basecmd = [yumbin, '-d', '2', '-y']
if not repoquery:
repoq = None
else:
repoq = [repoquery, '--show-duplicates', '--plugins', '--quiet', '-q']
if conf_file and os.path.exists(conf_file):
yum_basecmd += ['-c', conf_file]
if repoq:
repoq += ['-c', conf_file]
dis_repos =[]
en_repos = []
if disablerepo:
dis_repos = disablerepo.split(',')
if enablerepo:
en_repos = enablerepo.split(',')
for repoid in dis_repos:
r_cmd = ['--disablerepo=%s' % repoid]
yum_basecmd.extend(r_cmd)
for repoid in en_repos:
r_cmd = ['--enablerepo=%s' % repoid]
yum_basecmd.extend(r_cmd)
if state in ['installed', 'present', 'latest']:
my = yum_base(conf_file)
try:
for r in dis_repos:
my.repos.disableRepo(r)
current_repos = my.repos.repos.keys()
for r in en_repos:
try:
my.repos.enableRepo(r)
new_repos = my.repos.repos.keys()
for i in new_repos:
if i not in current_repos:
rid = my.repos.getRepo(i)
a = rid.repoXML.repoid
current_repos = new_repos
except yum.Errors.YumBaseError, e:
module.fail_json(msg="Error setting/accessing repo %s: %s" % (r, e))
except yum.Errors.YumBaseError, e:
module.fail_json(msg="Error accessing repos: %s" % e)
if state in ['installed', 'present']:
if disable_gpg_check:
yum_basecmd.append('--nogpgcheck')
install(module, items, repoq, yum_basecmd, conf_file, en_repos, dis_repos)
elif state in ['removed', 'absent']:
remove(module, items, repoq, yum_basecmd, conf_file, en_repos, dis_repos)
elif state == 'latest':
if disable_gpg_check:
yum_basecmd.append('--nogpgcheck')
latest(module, items, repoq, yum_basecmd, conf_file, en_repos, dis_repos)
# should be caught by AnsibleModule argument_spec
return dict(changed=False, failed=True, results='', errors='unexpected state')
def main():
# state=installed name=pkgspec
# state=removed name=pkgspec
# state=latest name=pkgspec
#
# informational commands:
# list=installed
# list=updates
# list=available
# list=repos
# list=pkgspec
module = AnsibleModule(
argument_spec = dict(
name=dict(aliases=['pkg']),
# removed==absent, installed==present, these are accepted as aliases
state=dict(default='installed', choices=['absent','present','installed','removed','latest']),
enablerepo=dict(),
disablerepo=dict(),
list=dict(),
conf_file=dict(default=None),
disable_gpg_check=dict(required=False, default="no", type='bool'),
# this should not be needed, but exists as a failsafe
install_repoquery=dict(required=False, default="yes", type='bool'),
),
required_one_of = [['name','list']],
mutually_exclusive = [['name','list']],
supports_check_mode = True
)
# this should not be needed, but exists as a failsafe
params = module.params
if params['install_repoquery'] and not repoquery and not module.check_mode:
install_yum_utils(module)
if params['list']:
if not repoquery:
module.fail_json(msg="repoquery is required to use list= with this module. Please install the yum-utils package.")
results = dict(results=list_stuff(module, params['conf_file'], params['list']))
module.exit_json(**results)
else:
pkg = params['name']
state = params['state']
enablerepo = params.get('enablerepo', '')
disablerepo = params.get('disablerepo', '')
disable_gpg_check = params['disable_gpg_check']
res = ensure(module, state, pkg, params['conf_file'], enablerepo,
disablerepo, disable_gpg_check)
module.fail_json(msg="we should never get here unless this all failed", **res)
# import module snippets
from ansible.module_utils.basic import *
main()

@ -1,607 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: git
author: Michael DeHaan
version_added: "0.0.1"
short_description: Deploy software (or files) from git checkouts
description:
- Manage I(git) checkouts of repositories to deploy files or software.
options:
repo:
required: true
aliases: [ name ]
description:
- git, SSH, or HTTP protocol address of the git repository.
dest:
required: false
description:
- Absolute path of where the repository should be checked out to.
This parameter is required, unless C(update) is set to C(no).
This change was made in version 1.8. Prior to this version, the
C(dest) parameter was always required.
version:
required: false
default: "HEAD"
description:
- What version of the repository to check out. This can be the
full 40-character I(SHA-1) hash, the literal string C(HEAD), a
branch name, or a tag name.
accept_hostkey:
required: false
default: "no"
choices: [ "yes", "no" ]
version_added: "1.5"
description:
- if C(yes), adds the hostkey for the repo url if not already
added. If C(ssh_opts) contains "-o StrictHostKeyChecking=no",
this parameter is ignored.
ssh_opts:
required: false
default: None
version_added: "1.5"
description:
- Creates a wrapper script and exports the path as GIT_SSH
which git then automatically uses to override ssh arguments.
An example value could be "-o StrictHostKeyChecking=no"
key_file:
required: false
default: None
version_added: "1.5"
description:
- Specify an optional private key file to use for the checkout.
reference:
required: false
default: null
version_added: "1.4"
description:
- Reference repository (see "git clone --reference ...")
remote:
required: false
default: "origin"
description:
- Name of the remote.
force:
required: false
default: "yes"
choices: [ "yes", "no" ]
version_added: "0.7"
description:
- If C(yes), any modified files in the working
repository will be discarded. Prior to 0.7, this was always
'yes' and could not be disabled.
depth:
required: false
default: null
version_added: "1.2"
description:
- Create a shallow clone with a history truncated to the specified
number of revisions. The minimum possible value is C(1); anything
lower is ignored.
update:
required: false
default: "yes"
choices: [ "yes", "no" ]
version_added: "1.2"
description:
- If C(no), just returns information about the repository without updating.
executable:
required: false
default: null
version_added: "1.4"
description:
- Path to git executable to use. If not supplied,
the normal mechanism for resolving binary paths will be used.
bare:
required: false
default: "no"
choices: [ "yes", "no" ]
version_added: "1.4"
description:
- if C(yes), repository will be created as a bare repo, otherwise
it will be a standard repo with a workspace.
recursive:
required: false
default: "yes"
choices: [ "yes", "no" ]
version_added: "1.6"
description:
- if C(no), repository will be cloned without the --recursive
option, skipping sub-modules.
notes:
- "If the task seems to be hanging, first verify remote host is in C(known_hosts).
SSH will prompt user to authorize the first contact with a remote host. To avoid this prompt,
one solution is to add the remote host public key in C(/etc/ssh/ssh_known_hosts) before calling
the git module, with the following command: ssh-keyscan -H remote_host.com >> /etc/ssh/ssh_known_hosts."
'''
EXAMPLES = '''
# Example git checkout from Ansible Playbooks
- git: repo=git://foosball.example.org/path/to/repo.git
dest=/srv/checkout
version=release-0.22
# Example read-write git checkout from github
- git: repo=ssh://git@github.com/mylogin/hello.git dest=/home/mylogin/hello
# Example just ensuring the repo checkout exists
- git: repo=git://foosball.example.org/path/to/repo.git dest=/srv/checkout update=no
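# Example checkout over ssh using a deploy key and accepting the host key on
# first contact (the key path below is illustrative)
- git: repo=ssh://git@github.com/mylogin/hello.git dest=/home/mylogin/hello
accept_hostkey=yes key_file=/home/mylogin/.ssh/id_rsa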
'''
import re
import tempfile
def get_submodule_update_params(module, git_path, cwd):
#or: git submodule [--quiet] update [--init] [-N|--no-fetch]
#[-f|--force] [--rebase] [--reference <repository>] [--merge]
#[--recursive] [--] [<path>...]
params = []
# run a bad submodule command to get valid params
cmd = "%s submodule update --help" % (git_path)
rc, stdout, stderr = module.run_command(cmd, cwd=cwd)
lines = stderr.split('\n')
update_line = None
for line in lines:
if 'git submodule [--quiet] update ' in line:
update_line = line
if update_line:
update_line = update_line.replace('[','')
update_line = update_line.replace(']','')
update_line = update_line.replace('|',' ')
parts = shlex.split(update_line)
for part in parts:
if part.startswith('--'):
part = part.replace('--', '')
params.append(part)
return params
def write_ssh_wrapper():
module_dir = get_module_path()
try:
# make sure we have full permission to the module_dir, which
# may not be the case if we're sudo'ing to a non-root user
if os.access(module_dir, os.W_OK|os.R_OK|os.X_OK):
fd, wrapper_path = tempfile.mkstemp(prefix=module_dir + '/')
else:
raise OSError
except (IOError, OSError):
fd, wrapper_path = tempfile.mkstemp()
fh = os.fdopen(fd, 'w+b')
template = """#!/bin/sh
if [ -z "$GIT_SSH_OPTS" ]; then
BASEOPTS=""
else
BASEOPTS=$GIT_SSH_OPTS
fi
if [ -z "$GIT_KEY" ]; then
ssh $BASEOPTS "$@"
else
ssh -i "$GIT_KEY" $BASEOPTS "$@"
fi
"""
fh.write(template)
fh.close()
st = os.stat(wrapper_path)
os.chmod(wrapper_path, st.st_mode | stat.S_IEXEC)
return wrapper_path
def set_git_ssh(ssh_wrapper, key_file, ssh_opts):
if os.environ.get("GIT_SSH"):
del os.environ["GIT_SSH"]
os.environ["GIT_SSH"] = ssh_wrapper
if os.environ.get("GIT_KEY"):
del os.environ["GIT_KEY"]
if key_file:
os.environ["GIT_KEY"] = key_file
if os.environ.get("GIT_SSH_OPTS"):
del os.environ["GIT_SSH_OPTS"]
if ssh_opts:
os.environ["GIT_SSH_OPTS"] = ssh_opts
def get_version(module, git_path, dest, ref="HEAD"):
''' samples the version of the git repo '''
cmd = "%s rev-parse %s" % (git_path, ref)
rc, stdout, stderr = module.run_command(cmd, cwd=dest)
sha = stdout.rstrip('\n')
return sha
def clone(git_path, module, repo, dest, remote, depth, version, bare,
reference, recursive):
''' makes a new git repo if it does not already exist '''
dest_dirname = os.path.dirname(dest)
try:
os.makedirs(dest_dirname)
except:
pass
cmd = [ git_path, 'clone' ]
if bare:
cmd.append('--bare')
else:
cmd.extend([ '--origin', remote ])
if recursive:
cmd.extend([ '--recursive' ])
if is_remote_branch(git_path, module, dest, repo, version) \
or is_remote_tag(git_path, module, dest, repo, version):
cmd.extend([ '--branch', version ])
if depth:
cmd.extend([ '--depth', str(depth) ])
if reference:
cmd.extend([ '--reference', str(reference) ])
cmd.extend([ repo, dest ])
module.run_command(cmd, check_rc=True, cwd=dest_dirname)
if bare:
if remote != 'origin':
module.run_command([git_path, 'remote', 'add', remote, repo], check_rc=True, cwd=dest)
def has_local_mods(module, git_path, dest, bare):
if bare:
return False
cmd = "%s status -s" % (git_path)
rc, stdout, stderr = module.run_command(cmd, cwd=dest)
lines = stdout.splitlines()
lines = filter(lambda c: not re.search('^\\?\\?.*$', c), lines)
return len(lines) > 0
def reset(git_path, module, dest):
'''
Resets the index and working tree to HEAD.
Discards any changes to tracked files in working
tree since that commit.
'''
cmd = "%s reset --hard HEAD" % (git_path,)
return module.run_command(cmd, check_rc=True, cwd=dest)
def get_remote_head(git_path, module, dest, version, remote, bare):
cloning = False
cwd = None
if remote == module.params['repo']:
cloning = True
else:
cwd = dest
if version == 'HEAD':
if cloning:
# cloning the repo, just get the remote's HEAD version
cmd = '%s ls-remote %s -h HEAD' % (git_path, remote)
else:
head_branch = get_head_branch(git_path, module, dest, remote, bare)
cmd = '%s ls-remote %s -h refs/heads/%s' % (git_path, remote, head_branch)
elif is_remote_branch(git_path, module, dest, remote, version):
cmd = '%s ls-remote %s -h refs/heads/%s' % (git_path, remote, version)
elif is_remote_tag(git_path, module, dest, remote, version):
cmd = '%s ls-remote %s -t refs/tags/%s' % (git_path, remote, version)
else:
# appears to be a sha1; return it as-is, since we cannot query
# the remote for a specific sha1
return version
(rc, out, err) = module.run_command(cmd, check_rc=True, cwd=cwd)
if len(out) < 1:
module.fail_json(msg="Could not determine remote revision for %s" % version)
rev = out.split()[0]
return rev
def is_remote_tag(git_path, module, dest, remote, version):
cmd = '%s ls-remote %s -t refs/tags/%s' % (git_path, remote, version)
(rc, out, err) = module.run_command(cmd, check_rc=True, cwd=dest)
if version in out:
return True
else:
return False
def get_branches(git_path, module, dest):
branches = []
cmd = '%s branch -a' % (git_path,)
(rc, out, err) = module.run_command(cmd, cwd=dest)
if rc != 0:
module.fail_json(msg="Could not determine branch data - received %s" % out)
for line in out.split('\n'):
branches.append(line.strip())
return branches
def get_tags(git_path, module, dest):
tags = []
cmd = '%s tag' % (git_path,)
(rc, out, err) = module.run_command(cmd, cwd=dest)
if rc != 0:
module.fail_json(msg="Could not determine tag data - received %s" % out)
for line in out.split('\n'):
tags.append(line.strip())
return tags
def is_remote_branch(git_path, module, dest, remote, version):
cmd = '%s ls-remote %s -h refs/heads/%s' % (git_path, remote, version)
(rc, out, err) = module.run_command(cmd, check_rc=True, cwd=dest)
if version in out:
return True
else:
return False
def is_local_branch(git_path, module, dest, branch):
branches = get_branches(git_path, module, dest)
lbranch = '%s' % branch
if lbranch in branches:
return True
elif '* %s' % branch in branches:
return True
else:
return False
def is_not_a_branch(git_path, module, dest):
branches = get_branches(git_path, module, dest)
for b in branches:
if b.startswith('* ') and 'no branch' in b:
return True
return False
def get_head_branch(git_path, module, dest, remote, bare=False):
'''
Determine what branch HEAD is associated with. This is partly
taken from lib/ansible/utils/__init__.py. It finds the correct
path to .git/HEAD and reads from that file the branch that HEAD is
associated with. In the case of a detached HEAD, this will look
up the branch in .git/refs/remotes/<remote>/HEAD.
'''
if bare:
repo_path = dest
else:
repo_path = os.path.join(dest, '.git')
# Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
if os.path.isfile(repo_path):
try:
gitdir = yaml.safe_load(open(repo_path)).get('gitdir')
# The gitdir recorded in the .git file may be an absolute path.
if os.path.isabs(gitdir):
repo_path = gitdir
else:
repo_path = os.path.join(repo_path.split('.git')[0], gitdir)
except (IOError, AttributeError):
return ''
# Read .git/HEAD for the name of the branch.
# If we're in a detached HEAD state, look up the branch associated with
# the remote HEAD in .git/refs/remotes/<remote>/HEAD
f = open(os.path.join(repo_path, "HEAD"))
if is_not_a_branch(git_path, module, dest):
f.close()
f = open(os.path.join(repo_path, 'refs', 'remotes', remote, 'HEAD'))
branch = f.readline().split('/')[-1].rstrip("\n")
f.close()
return branch
def fetch(git_path, module, repo, dest, version, remote, bare):
''' updates repo from remote sources '''
(rc, out0, err0) = module.run_command([git_path, 'remote', 'set-url', remote, repo], cwd=dest)
if rc != 0:
module.fail_json(msg="Failed to set a new url %s for %s: %s" % (repo, remote, out0 + err0))
if bare:
(rc, out1, err1) = module.run_command([git_path, 'fetch', remote, '+refs/heads/*:refs/heads/*'], cwd=dest)
else:
(rc, out1, err1) = module.run_command("%s fetch %s" % (git_path, remote), cwd=dest)
if rc != 0:
module.fail_json(msg="Failed to download remote objects and refs")
if bare:
(rc, out2, err2) = module.run_command([git_path, 'fetch', remote, '+refs/tags/*:refs/tags/*'], cwd=dest)
else:
(rc, out2, err2) = module.run_command("%s fetch --tags %s" % (git_path, remote), cwd=dest)
if rc != 0:
module.fail_json(msg="Failed to download remote objects and refs")
(rc, out3, err3) = submodule_update(git_path, module, dest)
return (rc, out1 + out2 + out3, err1 + err2 + err3)
def submodule_update(git_path, module, dest):
''' init and update any submodules '''
# get the valid submodule params
params = get_submodule_update_params(module, git_path, dest)
# skip submodule commands if .gitmodules is not present
if not os.path.exists(os.path.join(dest, '.gitmodules')):
return (0, '', '')
cmd = [ git_path, 'submodule', 'sync' ]
(rc, out, err) = module.run_command(cmd, check_rc=True, cwd=dest)
if 'remote' in params:
cmd = [ git_path, 'submodule', 'update', '--init', '--recursive' ,'--remote' ]
else:
cmd = [ git_path, 'submodule', 'update', '--init', '--recursive' ]
(rc, out, err) = module.run_command(cmd, cwd=dest)
if rc != 0:
module.fail_json(msg="Failed to init/update submodules: %s" % out + err)
return (rc, out, err)
def switch_version(git_path, module, dest, remote, version, recursive):
''' once pulled, switch to a particular SHA, tag, or branch '''
cmd = ''
if version != 'HEAD':
if is_remote_branch(git_path, module, dest, remote, version):
if not is_local_branch(git_path, module, dest, version):
cmd = "%s checkout --track -b %s %s/%s" % (git_path, version, remote, version)
else:
(rc, out, err) = module.run_command("%s checkout --force %s" % (git_path, version), cwd=dest)
if rc != 0:
module.fail_json(msg="Failed to checkout branch %s" % version)
cmd = "%s reset --hard %s/%s" % (git_path, remote, version)
else:
cmd = "%s checkout --force %s" % (git_path, version)
else:
branch = get_head_branch(git_path, module, dest, remote)
(rc, out, err) = module.run_command("%s checkout --force %s" % (git_path, branch), cwd=dest)
if rc != 0:
module.fail_json(msg="Failed to checkout branch %s" % branch)
cmd = "%s reset --hard %s" % (git_path, remote)
(rc, out1, err1) = module.run_command(cmd, cwd=dest)
if rc != 0:
if version != 'HEAD':
module.fail_json(msg="Failed to checkout %s" % (version))
else:
module.fail_json(msg="Failed to checkout branch %s" % (branch))
if recursive:
(rc, out2, err2) = submodule_update(git_path, module, dest)
out1 += out2
err1 += err2
return (rc, out1, err1)
# ===========================================
def main():
module = AnsibleModule(
argument_spec = dict(
dest=dict(),
repo=dict(required=True, aliases=['name']),
version=dict(default='HEAD'),
remote=dict(default='origin'),
reference=dict(default=None),
force=dict(default='yes', type='bool'),
depth=dict(default=None, type='int'),
update=dict(default='yes', type='bool'),
accept_hostkey=dict(default='no', type='bool'),
key_file=dict(default=None, required=False),
ssh_opts=dict(default=None, required=False),
executable=dict(default=None),
bare=dict(default='no', type='bool'),
recursive=dict(default='yes', type='bool'),
),
supports_check_mode=True
)
dest = module.params['dest']
repo = module.params['repo']
version = module.params['version']
remote = module.params['remote']
force = module.params['force']
depth = module.params['depth']
update = module.params['update']
bare = module.params['bare']
reference = module.params['reference']
git_path = module.params['executable'] or module.get_bin_path('git', True)
key_file = module.params['key_file']
ssh_opts = module.params['ssh_opts']
gitconfig = None
if not dest and update:
module.fail_json(msg="the destination directory must be specified unless update=no")
elif dest:
dest = os.path.abspath(os.path.expanduser(dest))
if bare:
gitconfig = os.path.join(dest, 'config')
else:
gitconfig = os.path.join(dest, '.git', 'config')
# create a wrapper script and export
# GIT_SSH=<path> as an environment variable
# for git to use the wrapper script
ssh_wrapper = None
if key_file or ssh_opts:
ssh_wrapper = write_ssh_wrapper()
set_git_ssh(ssh_wrapper, key_file, ssh_opts)
module.add_cleanup_file(path=ssh_wrapper)
# add the git repo's hostkey
if module.params['ssh_opts'] is not None:
if not "-o StrictHostKeyChecking=no" in module.params['ssh_opts']:
add_git_host_key(module, repo, accept_hostkey=module.params['accept_hostkey'])
else:
add_git_host_key(module, repo, accept_hostkey=module.params['accept_hostkey'])
recursive = module.params['recursive']
rc, out, err, status = (0, None, None, None)
before = None
local_mods = False
if (gitconfig and not os.path.exists(gitconfig)) or (not gitconfig and not update):
# if there is no git configuration, do a clone operation unless the
# user requested no updates or we're doing a check mode test (in
# which case we do a ls-remote), otherwise clone the repo
if module.check_mode or not update:
remote_head = get_remote_head(git_path, module, dest, version, repo, bare)
module.exit_json(changed=True, before=before, after=remote_head)
# there's no git config, so clone
clone(git_path, module, repo, dest, remote, depth, version, bare, reference, recursive)
elif not update:
# Just return having found a repo already in the dest path
# this does no checking that the repo is the actual repo
# requested.
before = get_version(module, git_path, dest)
module.exit_json(changed=False, before=before, after=before)
else:
# else do a pull
local_mods = has_local_mods(module, git_path, dest, bare)
before = get_version(module, git_path, dest)
if local_mods:
# failure should happen regardless of check mode
if not force:
module.fail_json(msg="Local modifications exist in repository (force=no).")
# if force and in non-check mode, do a reset
if not module.check_mode:
reset(git_path, module, dest)
# exit if already at desired sha version
remote_head = get_remote_head(git_path, module, dest, version, remote, bare)
if before == remote_head:
if local_mods:
module.exit_json(changed=True, before=before, after=remote_head,
msg="Local modifications exist")
elif is_remote_tag(git_path, module, dest, repo, version):
# if the remote is a tag and we have the tag locally, exit early
if version in get_tags(git_path, module, dest):
module.exit_json(changed=False, before=before, after=remote_head)
else:
module.exit_json(changed=False, before=before, after=remote_head)
if module.check_mode:
module.exit_json(changed=True, before=before, after=remote_head)
fetch(git_path, module, repo, dest, version, remote, bare)
# switch to version specified regardless of whether
# we cloned or pulled
if not bare:
switch_version(git_path, module, dest, remote, version, recursive)
# determine if we changed anything
after = get_version(module, git_path, dest)
changed = False
if before != after or local_mods:
changed = True
# cleanup the wrapper script
if ssh_wrapper:
os.remove(ssh_wrapper)
module.exit_json(changed=changed, before=before, after=after)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.known_hosts import *
main()

@ -1,238 +0,0 @@
#!/usr/bin/python
#-*- coding: utf-8 -*-
# (c) 2013, Yeukhon Wong <yeukhon@acm.org>
#
# This module was originally inspired by Brad Olson's ansible-module-mercurial
# <https://github.com/bradobro/ansible-module-mercurial>. This module tends
# to follow the git module implementation.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import ConfigParser
DOCUMENTATION = '''
---
module: hg
short_description: Manages Mercurial (hg) repositories.
description:
- Manages Mercurial (hg) repositories. Supports SSH, HTTP/S and local addresses.
version_added: "1.0"
author: Yeukhon Wong
options:
repo:
description:
- The repository address.
required: true
default: null
aliases: [ name ]
dest:
description:
- Absolute path of where the repository should be cloned to.
required: true
default: null
revision:
description:
- Equivalent to the C(-r) option of the hg command; this can be a changeset, revision number,
branch name or tag.
required: false
default: "default"
aliases: [ version ]
force:
description:
- Discards uncommitted changes. Runs C(hg update -C).
required: false
default: "yes"
choices: [ "yes", "no" ]
purge:
description:
- Deletes untracked files. Runs C(hg purge).
required: false
default: "no"
choices: [ "yes", "no" ]
executable:
required: false
default: null
version_added: "1.4"
description:
- Path to hg executable to use. If not supplied,
the normal mechanism for resolving binary paths will be used.
notes:
- "If the task seems to be hanging, first verify remote host is in C(known_hosts).
SSH will prompt user to authorize the first contact with a remote host. To avoid this prompt,
one solution is to add the remote host public key in C(/etc/ssh/ssh_known_hosts) before calling
the hg module, with the following command: ssh-keyscan remote_host.com >> /etc/ssh/ssh_known_hosts."
requirements: [ ]
'''
EXAMPLES = '''
# Ensure the current working copy is on the stable branch and delete any untracked files.
- hg: repo=https://bitbucket.org/user/repo1 dest=/home/user/repo1 revision=stable purge=yes
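# Clone without discarding local modifications, using a non-default hg binary
# (the executable path below is illustrative)
- hg: repo=https://bitbucket.org/user/repo1 dest=/home/user/repo1 force=no executable=/usr/local/bin/hg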
'''
class Hg(object):
def __init__(self, module, dest, repo, revision, hg_path):
self.module = module
self.dest = dest
self.repo = repo
self.revision = revision
self.hg_path = hg_path
def _command(self, args_list):
(rc, out, err) = self.module.run_command([self.hg_path] + args_list)
return (rc, out, err)
def _list_untracked(self):
args = ['purge', '--config', 'extensions.purge=', '-R', self.dest, '--print']
return self._command(args)
def get_revision(self):
"""
hg id -b -i -t returns a string in the format:
"<changeset>[+] <branch_name> <tag>"
This format lists the state of the current working copy,
and indicates whether there are uncommitted changes by the
plus sign. Otherwise, the sign is omitted.
Read the full description via hg id --help
"""
(rc, out, err) = self._command(['id', '-b', '-i', '-t', '-R', self.dest])
if rc != 0:
self.module.fail_json(msg=err)
else:
return out.strip('\n')
def has_local_mods(self):
now = self.get_revision()
if '+' in now:
return True
else:
return False
def discard(self):
before = self.has_local_mods()
if not before:
return False
(rc, out, err) = self._command(['update', '-C', '-R', self.dest])
if rc != 0:
self.module.fail_json(msg=err)
after = self.has_local_mods()
if before != after and not after: # no more local modification
return True
def purge(self):
# before purge, find out if there are any untracked files
(rc1, out1, err1) = self._list_untracked()
if rc1 != 0:
self.module.fail_json(msg=err1)
# there are some untracked files
if out1 != '':
args = ['purge', '--config', 'extensions.purge=', '-R', self.dest]
(rc2, out2, err2) = self._command(args)
if rc2 != 0:
self.module.fail_json(msg=err2)
return True
else:
return False
def cleanup(self, force, purge):
discarded = False
purged = False
if force:
discarded = self.discard()
if purge:
purged = self.purge()
if discarded or purged:
return True
else:
return False
def pull(self):
return self._command(
['pull', '-R', self.dest, self.repo])
def update(self):
return self._command(['update', '-R', self.dest])
def clone(self):
return self._command(['clone', self.repo, self.dest, '-r', self.revision])
def switch_version(self):
return self._command(['update', '-r', self.revision, '-R', self.dest])
# ===========================================
def main():
module = AnsibleModule(
argument_spec = dict(
repo = dict(required=True, aliases=['name']),
dest = dict(required=True),
revision = dict(default="default", aliases=['version']),
force = dict(default='yes', type='bool'),
purge = dict(default='no', type='bool'),
executable = dict(default=None),
),
)
repo = module.params['repo']
dest = os.path.expanduser(module.params['dest'])
revision = module.params['revision']
force = module.params['force']
purge = module.params['purge']
hg_path = module.params['executable'] or module.get_bin_path('hg', True)
hgrc = os.path.join(dest, '.hg/hgrc')
# initial states
before = ''
changed = False
cleaned = False
hg = Hg(module, dest, repo, revision, hg_path)
# If there is no hgrc file, then assume repo is absent
# and perform clone. Otherwise, perform pull and update.
if not os.path.exists(hgrc):
(rc, out, err) = hg.clone()
if rc != 0:
module.fail_json(msg=err)
else:
# get the current state before doing pulling
before = hg.get_revision()
# can perform force and purge
cleaned = hg.cleanup(force, purge)
(rc, out, err) = hg.pull()
if rc != 0:
module.fail_json(msg=err)
(rc, out, err) = hg.update()
if rc != 0:
module.fail_json(msg=err)
hg.switch_version()
after = hg.get_revision()
if before != after or cleaned:
changed = True
module.exit_json(before=before, after=after, changed=changed, cleaned=cleaned)
# import module snippets
from ansible.module_utils.basic import *
main()

@ -1,231 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: subversion
short_description: Deploys a subversion repository.
description:
- Deploy given repository URL / revision to dest. If dest exists, update to the specified revision, otherwise perform a checkout.
version_added: "0.7"
author: Dane Summers, njharman@gmail.com
notes:
- Requires I(svn) to be installed on the client.
requirements: []
options:
repo:
description:
- The subversion URL to the repository.
required: true
aliases: [ name, repository ]
default: null
dest:
description:
- Absolute path where the repository should be deployed.
required: true
default: null
revision:
description:
- Specific revision to checkout.
required: false
default: HEAD
aliases: [ version ]
force:
description:
- If C(yes), modified files will be discarded. If C(no), module will fail if it encounters modified files.
required: false
default: "yes"
choices: [ "yes", "no" ]
username:
description:
- --username parameter passed to svn.
required: false
default: null
password:
description:
- --password parameter passed to svn.
required: false
default: null
executable:
required: false
default: null
version_added: "1.4"
description:
- Path to svn executable to use. If not supplied,
the normal mechanism for resolving binary paths will be used.
export:
required: false
default: "no"
choices: [ "yes", "no" ]
version_added: "1.6"
description:
- If C(yes), do export instead of checkout/update.
'''
EXAMPLES = '''
# Checkout subversion repository to specified folder.
- subversion: repo=svn+ssh://an.example.org/path/to/repo dest=/src/checkout
# Export subversion directory to folder
- subversion: repo=svn+ssh://an.example.org/path/to/repo dest=/src/export export=True
'''
import re
import tempfile
class Subversion(object):
def __init__(
self, module, dest, repo, revision, username, password, svn_path):
self.module = module
self.dest = dest
self.repo = repo
self.revision = revision
self.username = username
self.password = password
self.svn_path = svn_path
def _exec(self, args):
bits = [
self.svn_path,
'--non-interactive',
'--trust-server-cert',
'--no-auth-cache',
]
if self.username:
bits.extend(["--username", self.username])
if self.password:
bits.extend(["--password", self.password])
bits.extend(args)
rc, out, err = self.module.run_command(bits, check_rc=True)
return out.splitlines()
def checkout(self):
'''Creates new svn working directory if it does not already exist.'''
self._exec(["checkout", "-r", self.revision, self.repo, self.dest])
def export(self, force=False):
'''Export svn repo to directory'''
self._exec(["export", "-r", self.revision, self.repo, self.dest])
def switch(self):
'''Change working directory's repo.'''
# switch to ensure we are pointing at correct repo.
self._exec(["switch", self.repo, self.dest])
def update(self):
'''Update existing svn working directory.'''
self._exec(["update", "-r", self.revision, self.dest])
def revert(self):
'''Revert svn working directory.'''
self._exec(["revert", "-R", self.dest])
def get_revision(self):
'''Revision and URL of subversion working directory.'''
text = '\n'.join(self._exec(["info", self.dest]))
rev = re.search(r'^Revision:.*$', text, re.MULTILINE).group(0)
url = re.search(r'^URL:.*$', text, re.MULTILINE).group(0)
return rev, url
def has_local_mods(self):
'''True if revisioned files have been added or modified. Unrevisioned files are ignored.'''
lines = self._exec(["status", self.dest])
# Match only revisioned files, i.e. ignore status '?'.
regex = re.compile(r'^[^?]')
# Has local mods if more than 0 modified revisioned files.
return len(filter(regex.match, lines)) > 0
def needs_update(self):
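'''Compare the working copy revision with the repository HEAD revision; returns (changed, current, head).'''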
curr, url = self.get_revision()
out2 = '\n'.join(self._exec(["info", "-r", "HEAD", self.dest]))
head = re.search(r'^Revision:.*$', out2, re.MULTILINE).group(0)
rev1 = int(curr.split(':')[1].strip())
rev2 = int(head.split(':')[1].strip())
change = False
if rev1 < rev2:
change = True
return change, curr, head
# ===========================================
def main():
module = AnsibleModule(
argument_spec=dict(
dest=dict(required=True),
repo=dict(required=True, aliases=['name', 'repository']),
revision=dict(default='HEAD', aliases=['rev', 'version']),
force=dict(default='yes', type='bool'),
username=dict(required=False),
password=dict(required=False),
executable=dict(default=None),
export=dict(default=False, required=False, type='bool'),
),
supports_check_mode=True
)
dest = os.path.expanduser(module.params['dest'])
repo = module.params['repo']
revision = module.params['revision']
force = module.params['force']
username = module.params['username']
password = module.params['password']
svn_path = module.params['executable'] or module.get_bin_path('svn', True)
export = module.params['export']
os.environ['LANG'] = 'C'
svn = Subversion(module, dest, repo, revision, username, password, svn_path)
if not os.path.exists(dest):
before = None
local_mods = False
if module.check_mode:
module.exit_json(changed=True)
if not export:
svn.checkout()
else:
svn.export()
elif os.path.exists("%s/.svn" % (dest, )):
# Order matters. Need to get local mods before switch to avoid false
# positives. Need to switch before revert to ensure we are reverting to
# correct repo.
if module.check_mode:
check, before, after = svn.needs_update()
module.exit_json(changed=check, before=before, after=after)
before = svn.get_revision()
local_mods = svn.has_local_mods()
svn.switch()
if local_mods:
if force:
svn.revert()
else:
module.fail_json(msg="ERROR: modified files exist in the repository.")
svn.update()
else:
module.fail_json(msg="ERROR: %s folder already exists, but its not a subversion repository." % (dest, ))
after = svn.get_revision()
changed = before != after or local_mods
module.exit_json(changed=changed, before=before, after=after)
# import module snippets
from ansible.module_utils.basic import *
main()

@ -1,89 +0,0 @@
#!/usr/bin/python
#coding: utf-8 -*-
# (c) 2013-2014, Christian Berendt <berendt@b1-systems.de>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: apache2_module
version_added: 1.6
short_description: enables/disables a module of the Apache2 webserver
description:
- Enables or disables a specified module of the Apache2 webserver.
options:
name:
description:
- name of the module to enable/disable
required: true
state:
description:
- indicate the desired state of the resource
choices: ['present', 'absent']
default: present
'''
EXAMPLES = '''
# enables the Apache2 module "wsgi"
- apache2_module: state=present name=wsgi
# disables the Apache2 module "wsgi"
- apache2_module: state=absent name=wsgi
'''
import re
def _disable_module(module):
name = module.params['name']
a2dismod_binary = module.get_bin_path("a2dismod")
result, stdout, stderr = module.run_command("%s %s" % (a2dismod_binary, name))
if re.match(r'.*' + name + r' already disabled.*', stdout, re.S):
module.exit_json(changed = False, result = "Success")
elif result != 0:
module.fail_json(msg="Failed to disable module %s: %s" % (name, stdout))
else:
module.exit_json(changed = True, result = "Disabled")
def _enable_module(module):
name = module.params['name']
a2enmod_binary = module.get_bin_path("a2enmod")
result, stdout, stderr = module.run_command("%s %s" % (a2enmod_binary, name))
if re.match(r'.*' + name + r' already enabled.*', stdout, re.S):
module.exit_json(changed = False, result = "Success")
elif result != 0:
module.fail_json(msg="Failed to enable module %s: %s" % (name, stdout))
else:
module.exit_json(changed = True, result = "Enabled")
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True),
state = dict(default='present', choices=['absent', 'present'])
),
)
if module.params['state'] == 'present':
_enable_module(module)
if module.params['state'] == 'absent':
_disable_module(module)
# import module snippets
from ansible.module_utils.basic import *
main()

@ -1,281 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Scott Anderson <scottanderson42@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: django_manage
short_description: Manages a Django application.
description:
- Manages a Django application using the I(manage.py) application frontend to I(django-admin). With the I(virtualenv) parameter, all management commands will be executed by the given I(virtualenv) installation.
version_added: "1.1"
options:
command:
choices: [ 'cleanup', 'collectstatic', 'flush', 'loaddata', 'migrate', 'runfcgi', 'syncdb', 'test', 'validate', ]
description:
- The name of the Django management command to run. Built in commands are cleanup, collectstatic, flush, loaddata, migrate, runfcgi, syncdb, test, and validate. Other commands can be entered, but will fail if they're unknown to Django.
required: true
app_path:
description:
- The path to the root of the Django application where B(manage.py) lives.
required: true
settings:
description:
- The Python path to the application's settings module, such as 'myapp.settings'.
required: false
pythonpath:
description:
- A directory to add to the Python path. Typically used to include the settings module if it is located external to the application directory.
required: false
virtualenv:
description:
- An optional path to a I(virtualenv) installation to use while running the manage application.
required: false
apps:
description:
- A list of space-delimited apps to target. Used by the 'test' command.
required: false
cache_table:
description:
- The name of the table used for database-backed caching. Used by the 'createcachetable' command.
required: false
database:
description:
- The database to target. Used by the 'createcachetable', 'flush', 'loaddata', and 'syncdb' commands.
required: false
failfast:
description:
- Fail the command immediately if a test fails. Used by the 'test' command.
required: false
default: "no"
choices: [ "yes", "no" ]
fixtures:
description:
- A space-delimited list of fixture file names to load in the database. B(Required) by the 'loaddata' command.
required: false
skip:
description:
- Will skip over out-of-order missing migrations. This parameter can only be used with I(migrate).
required: false
version_added: "1.3"
merge:
description:
- Will run out-of-order or missing migrations as they are, without treating them as rollback migrations. This parameter can only be used with the 'migrate' command.
required: false
version_added: "1.3"
link:
description:
- Will create links to the files instead of copying them. This parameter can only be used with the 'collectstatic' command.
required: false
version_added: "1.3"
notes:
- I(virtualenv) (U(http://www.virtualenv.org)) must be installed on the remote host if the virtualenv parameter is specified.
- This module will create a virtualenv if the virtualenv parameter is specified and a virtualenv does not already exist at the given location.
- This module assumes English error messages for the 'createcachetable' command to detect table existence, unfortunately.
- To be able to use the migrate command, you must have South installed and added as an app in your settings.
- To be able to use the collectstatic command, you must have enabled staticfiles in your settings.
requirements: [ "virtualenv", "django" ]
author: Scott Anderson
'''
EXAMPLES = """
# Run cleanup on the application installed in 'django_dir'.
- django_manage: command=cleanup app_path={{ django_dir }}
# Load the initial_data fixture into the application
- django_manage: command=loaddata app_path={{ django_dir }} fixtures={{ initial_data }}
# Run syncdb on the application
- django_manage: >
command=syncdb
app_path={{ django_dir }}
settings={{ settings_app_name }}
pythonpath={{ settings_dir }}
virtualenv={{ virtualenv_dir }}
# Run the SmokeTest test case from the main app. Useful for testing deploys.
- django_manage: command=test app_path=django_dir apps=main.SmokeTest
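# Illustrative addition: run South migrations inside a virtualenv, per the notes
# above (variable names reuse the placeholders from the examples above).
- django_manage: command=migrate app_path={{ django_dir }} virtualenv={{ virtualenv_dir }}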
"""
import os
def _fail(module, cmd, out, err, **kwargs):
msg = ''
if out:
msg += "stdout: %s" % (out, )
if err:
msg += "\n:stderr: %s" % (err, )
module.fail_json(cmd=cmd, msg=msg, **kwargs)
def _ensure_virtualenv(module):
venv_param = module.params['virtualenv']
if venv_param is None:
return
vbin = os.path.join(os.path.expanduser(venv_param), 'bin')
activate = os.path.join(vbin, 'activate')
if not os.path.exists(activate):
virtualenv = module.get_bin_path('virtualenv', True)
vcmd = [virtualenv, venv_param]
rc, out_venv, err_venv = module.run_command(vcmd)
if rc != 0:
_fail(module, vcmd, out_venv, err_venv)
os.environ["PATH"] = "%s:%s" % (vbin, os.environ["PATH"])
os.environ["VIRTUAL_ENV"] = venv_param
def createcachetable_filter_output(line):
return "Already exists" not in line
def flush_filter_output(line):
return "Installed" in line and "Installed 0 object" not in line
def loaddata_filter_output(line):
return "Installed" in line and "Installed 0 object" not in line
def syncdb_filter_output(line):
return ("Creating table " in line) or ("Installed" in line and "Installed 0 object" not in line)
def migrate_filter_output(line):
return ("Migrating forwards " in line) or ("Installed" in line and "Installed 0 object" not in line)
def main():
command_allowed_param_map = dict(
cleanup=(),
createcachetable=('cache_table', 'database', ),
flush=('database', ),
loaddata=('database', 'fixtures', ),
syncdb=('database', ),
test=('failfast', 'testrunner', 'liveserver', 'apps', ),
validate=(),
migrate=('apps', 'skip', 'merge'),
collectstatic=('link', ),
)
command_required_param_map = dict(
loaddata=('fixtures', ),
createcachetable=('cache_table', ),
)
# forces --noinput on every command that needs it
noinput_commands = (
'flush',
'syncdb',
'migrate',
'test',
'collectstatic',
)
# These params are allowed for certain commands only
specific_params = ('apps', 'database', 'failfast', 'fixtures', 'liveserver', 'testrunner')
# These params are automatically added to the command if present
general_params = ('settings', 'pythonpath', 'database',)
specific_boolean_params = ('failfast', 'skip', 'merge', 'link')
end_of_command_params = ('apps', 'cache_table', 'fixtures')
module = AnsibleModule(
argument_spec=dict(
command = dict(default=None, required=True),
app_path = dict(default=None, required=True),
settings = dict(default=None, required=False),
pythonpath = dict(default=None, required=False, aliases=['python_path']),
virtualenv = dict(default=None, required=False, aliases=['virtual_env']),
apps = dict(default=None, required=False),
cache_table = dict(default=None, required=False),
database = dict(default=None, required=False),
failfast = dict(default='no', required=False, type='bool', aliases=['fail_fast']),
fixtures = dict(default=None, required=False),
liveserver = dict(default=None, required=False, aliases=['live_server']),
testrunner = dict(default=None, required=False, aliases=['test_runner']),
skip = dict(default=None, required=False, type='bool'),
merge = dict(default=None, required=False, type='bool'),
link = dict(default=None, required=False, type='bool'),
),
)
command = module.params['command']
app_path = module.params['app_path']
virtualenv = module.params['virtualenv']
for param in specific_params:
value = module.params[param]
if param in specific_boolean_params:
value = module.boolean(value)
if value and param not in command_allowed_param_map[command]:
module.fail_json(msg='%s param is incompatible with command=%s' % (param, command))
for param in command_required_param_map.get(command, ()):
if not module.params[param]:
module.fail_json(msg='%s param is required for command=%s' % (param, command))
venv = module.params['virtualenv']
_ensure_virtualenv(module)
cmd = "python manage.py %s" % (command, )
if command in noinput_commands:
cmd = '%s --noinput' % cmd
for param in general_params:
if module.params[param]:
cmd = '%s --%s=%s' % (cmd, param, module.params[param])
for param in specific_boolean_params:
if module.boolean(module.params[param]):
cmd = '%s --%s' % (cmd, param)
# these params always get tacked on the end of the command
for param in end_of_command_params:
if module.params[param]:
cmd = '%s %s' % (cmd, module.params[param])
rc, out, err = module.run_command(cmd, cwd=app_path)
if rc != 0:
if command == 'createcachetable' and 'table' in err and 'already exists' in err:
out = 'Already exists.'
else:
if "Unknown command:" in err:
_fail(module, cmd, err, "Unknown django command: %s" % command)
_fail(module, cmd, out, err, path=os.environ["PATH"], syspath=sys.path)
changed = False
lines = out.split('\n')
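# Look up an output filter function named "<command>_filter_output" (defined above);
# if any output line passes that filter, the task is reported as changed.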
filt = globals().get(command + "_filter_output", None)
if filt:
filtered_output = filter(filt, out.split('\n'))
if len(filtered_output):
changed = filtered_output
module.exit_json(changed=changed, out=out, cmd=cmd, app_path=app_path, virtualenv=virtualenv,
settings=module.params['settings'], pythonpath=module.params['pythonpath'])
# import module snippets
from ansible.module_utils.basic import *
main()

@ -1,219 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Nimbis Services, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = """
module: htpasswd
version_added: "1.3"
short_description: manage user files for basic authentication
description:
- Add and remove username/password entries in a password file using htpasswd.
- This is used by web servers such as Apache and Nginx for basic authentication.
options:
path:
required: true
aliases: [ dest, destfile ]
description:
- Path to the file that contains the usernames and passwords
name:
required: true
aliases: [ username ]
description:
- User name to add or remove
password:
required: false
description:
- Password associated with user.
- Must be specified if user does not exist yet.
crypt_scheme:
required: false
choices: ["apr_md5_crypt", "des_crypt", "ldap_sha1", "plaintext"]
default: "apr_md5_crypt"
description:
- Encryption scheme to be used.
state:
required: false
choices: [ present, absent ]
default: "present"
description:
- Whether the user entry should be present or not
create:
required: false
choices: [ "yes", "no" ]
default: "yes"
description:
- Used with C(state=present). If specified, the file will be created
if it does not already exist. If set to "no", the module will fail if the
file does not exist.
notes:
- "This module depends on the I(passlib) Python library, which needs to be installed on all target systems."
- "On Debian, Ubuntu, or Fedora: install I(python-passlib)."
- "On RHEL or CentOS: Enable EPEL, then install I(python-passlib)."
requirements: [ passlib>=1.6 ]
author: Lorin Hochstein
"""
EXAMPLES = """
# Add a user to a password file and ensure permissions are set
- htpasswd: path=/etc/nginx/passwdfile name=janedoe password=9s36?;fyNp owner=root group=www-data mode=0640
# Remove a user from a password file
- htpasswd: path=/etc/apache2/passwdfile name=foobar state=absent
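# Illustrative addition: the notes above require passlib on the target host; on
# Debian/Ubuntu one way to satisfy that before calling htpasswd is (package name assumed):
- apt: name=python-passlib state=present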
"""
import os
from distutils.version import StrictVersion
try:
from passlib.apache import HtpasswdFile
import passlib
except ImportError:
passlib_installed = False
else:
passlib_installed = True
def create_missing_directories(dest):
destpath = os.path.dirname(dest)
if not os.path.exists(destpath):
os.makedirs(destpath)
def present(dest, username, password, crypt_scheme, create, check_mode):
""" Ensures user is present
Returns (msg, changed) """
if not os.path.exists(dest):
if not create:
raise ValueError('Destination %s does not exist' % dest)
if check_mode:
return ("Create %s" % dest, True)
create_missing_directories(dest)
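# passlib 1.6 renamed the HtpasswdFile constructor arguments (new/default_scheme
# instead of the older autoload/default), so branch on the installed version.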
if StrictVersion(passlib.__version__) >= StrictVersion('1.6'):
ht = HtpasswdFile(dest, new=True, default_scheme=crypt_scheme)
else:
ht = HtpasswdFile(dest, autoload=False, default=crypt_scheme)
if getattr(ht, 'set_password', None):
ht.set_password(username, password)
else:
ht.update(username, password)
ht.save()
return ("Created %s and added %s" % (dest, username), True)
else:
if StrictVersion(passlib.__version__) >= StrictVersion('1.6'):
ht = HtpasswdFile(dest, new=False, default_scheme=crypt_scheme)
else:
ht = HtpasswdFile(dest, default=crypt_scheme)
found = None
if getattr(ht, 'check_password', None):
found = ht.check_password(username, password)
else:
found = ht.verify(username, password)
if found:
return ("%s already present" % username, False)
else:
if not check_mode:
if getattr(ht, 'set_password', None):
ht.set_password(username, password)
else:
ht.update(username, password)
ht.save()
return ("Add/update %s" % username, True)
def absent(dest, username, check_mode):
""" Ensures user is absent
Returns (msg, changed) """
if not os.path.exists(dest):
raise ValueError("%s does not exists" % dest)
if StrictVersion(passlib.__version__) >= StrictVersion('1.6'):
ht = HtpasswdFile(dest, new=False)
else:
ht = HtpasswdFile(dest)
if username not in ht.users():
return ("%s not present" % username, False)
else:
if not check_mode:
ht.delete(username)
ht.save()
return ("Remove %s" % username, True)
def check_file_attrs(module, changed, message):
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
if changed:
message += " and "
changed = True
message += "ownership, perms or SE linux context changed"
return message, changed
def main():
arg_spec = dict(
path=dict(required=True, aliases=["dest", "destfile"]),
name=dict(required=True, aliases=["username"]),
password=dict(required=False, default=None),
crypt_scheme=dict(required=False, default=None),
state=dict(required=False, default="present"),
create=dict(type='bool', default='yes'),
)
module = AnsibleModule(argument_spec=arg_spec,
add_file_common_args=True,
supports_check_mode=True)
path = module.params['path']
username = module.params['name']
password = module.params['password']
crypt_scheme = module.params['crypt_scheme']
state = module.params['state']
create = module.params['create']
check_mode = module.check_mode
if not passlib_installed:
module.fail_json(msg="This module requires the passlib Python library")
try:
if state == 'present':
(msg, changed) = present(path, username, password, crypt_scheme, create, check_mode)
elif state == 'absent':
(msg, changed) = absent(path, username, check_mode)
else:
module.fail_json(msg="Invalid state: %s" % state)
check_file_attrs(module, changed, msg)
module.exit_json(msg=msg, changed=changed)
except Exception, e:
module.fail_json(msg=str(e))
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

@ -1,221 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Matt Wright <matt@nobien.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import os
DOCUMENTATION = '''
---
module: supervisorctl
short_description: Manage the state of a program or group of programs running via supervisord
description:
- Manage the state of a program or group of programs running via supervisord
version_added: "0.7"
options:
name:
description:
- The name of the supervisord program or group to manage.
- The name will be taken as group name when it ends with a colon I(:)
- Group support is only available in Ansible version 1.6 or later.
required: true
default: null
config:
description:
- The supervisor configuration file path
required: false
default: null
version_added: "1.3"
server_url:
description:
- URL on which supervisord server is listening
required: false
default: null
version_added: "1.3"
username:
description:
- username to use for authentication
required: false
default: null
version_added: "1.3"
password:
description:
- password to use for authentication
required: false
default: null
version_added: "1.3"
state:
description:
- The desired state of program/group.
required: true
default: null
choices: [ "present", "started", "stopped", "restarted" ]
supervisorctl_path:
description:
- path to supervisorctl executable
required: false
default: null
version_added: "1.4"
notes:
- When C(state) = I(present), the module will call C(supervisorctl reread) then C(supervisorctl add) if the program/group does not exist.
- When C(state) = I(restarted), the module will call C(supervisorctl update) then call C(supervisorctl restart).
requirements: [ "supervisorctl" ]
author: Matt Wright, Aaron Wang <inetfuture@gmail.com>
'''
EXAMPLES = '''
# Manage the state of program to be in 'started' state.
- supervisorctl: name=my_app state=started
# Manage the state of program group to be in 'started' state.
- supervisorctl: name='my_apps:' state=started
# Restart my_app, reading supervisorctl configuration from a specified file.
- supervisorctl: name=my_app state=restarted config=/var/opt/my_project/supervisord.conf
# Restart my_app, connecting to supervisord with credentials and server URL.
- supervisorctl: name=my_app state=restarted username=test password=testpass server_url=http://localhost:9001
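# Illustrative addition: ensure a newly defined program is loaded; per the notes,
# state=present runs 'supervisorctl reread' and then 'add' if the program is missing.
- supervisorctl: name=my_new_app state=present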
'''
def main():
arg_spec = dict(
name=dict(required=True),
config=dict(required=False),
server_url=dict(required=False),
username=dict(required=False),
password=dict(required=False),
supervisorctl_path=dict(required=False),
state=dict(required=True, choices=['present', 'started', 'restarted', 'stopped'])
)
module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True)
name = module.params['name']
is_group = False
if name.endswith(':'):
is_group = True
name = name.rstrip(':')
state = module.params['state']
config = module.params.get('config')
server_url = module.params.get('server_url')
username = module.params.get('username')
password = module.params.get('password')
supervisorctl_path = module.params.get('supervisorctl_path')
if supervisorctl_path:
supervisorctl_path = os.path.expanduser(supervisorctl_path)
if os.path.exists(supervisorctl_path) and module.is_executable(supervisorctl_path):
supervisorctl_args = [supervisorctl_path]
else:
module.fail_json(
msg="Provided path to supervisorctl does not exist or isn't executable: %s" % supervisorctl_path)
else:
supervisorctl_args = [module.get_bin_path('supervisorctl', True)]
if config:
supervisorctl_args.extend(['-c', os.path.expanduser(config)])
if server_url:
supervisorctl_args.extend(['-s', server_url])
if username:
supervisorctl_args.extend(['-u', username])
if password:
supervisorctl_args.extend(['-p', password])
def run_supervisorctl(cmd, name=None, **kwargs):
args = list(supervisorctl_args) # copy the master args
args.append(cmd)
if name:
args.append(name)
return module.run_command(args, **kwargs)
def get_matched_processes():
matched = []
rc, out, err = run_supervisorctl('status')
for line in out.splitlines():
# One status line may look like one of these two:
# process not in group:
# echo_date_lonely RUNNING pid 7680, uptime 13:22:18
# process in group:
# echo_date_group:echo_date_00 RUNNING pid 7681, uptime 13:22:18
fields = [field for field in line.split(' ') if field != '']
process_name = fields[0]
status = fields[1]
if is_group:
# If there is ':', this process must be in a group.
if ':' in process_name:
group = process_name.split(':')[0]
if group != name:
continue
else:
continue
else:
if process_name != name:
continue
matched.append((process_name, status))
return matched
def take_action_on_processes(processes, status_filter, action, expected_result):
to_take_action_on = []
for process_name, status in processes:
if status_filter(status):
to_take_action_on.append(process_name)
if len(to_take_action_on) == 0:
module.exit_json(changed=False, name=name, state=state)
if module.check_mode:
module.exit_json(changed=True)
for process_name in to_take_action_on:
rc, out, err = run_supervisorctl(action, process_name)
if '%s: %s' % (process_name, expected_result) not in out:
module.fail_json(msg=out)
module.exit_json(changed=True, name=name, state=state, affected=to_take_action_on)
if state == 'restarted':
rc, out, err = run_supervisorctl('update')
processes = get_matched_processes()
take_action_on_processes(processes, lambda s: True, 'restart', 'started')
processes = get_matched_processes()
if state == 'present':
if len(processes) > 0:
module.exit_json(changed=False, name=name, state=state)
if module.check_mode:
module.exit_json(changed=True)
run_supervisorctl('reread', check_rc=True)
rc, out, err = run_supervisorctl('add', name)
if '%s: added process group' % name in out:
module.exit_json(changed=True, name=name, state=state)
else:
module.fail_json(msg=out, name=name, state=state)
if state == 'started':
take_action_on_processes(processes, lambda s: s != 'RUNNING', 'start', 'started')
if state == 'stopped':
take_action_on_processes(processes, lambda s: s == 'RUNNING', 'stop', 'stopped')
# import module snippets
from ansible.module_utils.basic import *
main()

@ -1,100 +0,0 @@
#!powershell
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# WANT_JSON
# POWERSHELL_COMMON
# $params is not currently used in this module
# $params = Parse-Args $args;
$result = New-Object psobject @{
ansible_facts = New-Object psobject
changed = $false
};
$osversion = [Environment]::OSVersion
$memory = @()
$memory += Get-WmiObject win32_Physicalmemory
$capacity = 0
$memory | foreach {$capacity += $_.Capacity}
$netcfg = Get-WmiObject win32_NetworkAdapterConfiguration
$ActiveNetcfg = @(); $ActiveNetcfg+= $netcfg | where {$_.ipaddress -ne $null}
$formattednetcfg = @()
foreach ($adapter in $ActiveNetcfg)
{
$thisadapter = New-Object psobject @{
interface_name = $adapter.description
dns_domain = $adapter.dnsdomain
default_gateway = $null
interface_index = $adapter.InterfaceIndex
}
if ($adapter.defaultIPGateway)
{
$thisadapter.default_gateway = $adapter.DefaultIPGateway[0].ToString()
}
$formattednetcfg += $thisadapter;$thisadapter = $null
}
Set-Attr $result.ansible_facts "ansible_interfaces" $formattednetcfg
Set-Attr $result.ansible_facts "ansible_hostname" $env:COMPUTERNAME;
Set-Attr $result.ansible_facts "ansible_fqdn" "$([System.Net.Dns]::GetHostByName((hostname)).HostName)"
Set-Attr $result.ansible_facts "ansible_system" $osversion.Platform.ToString()
Set-Attr $result.ansible_facts "ansible_os_family" "Windows"
Set-Attr $result.ansible_facts "ansible_distribution" $osversion.VersionString
Set-Attr $result.ansible_facts "ansible_distribution_version" $osversion.Version.ToString()
Set-Attr $result.ansible_facts "ansible_totalmem" $capacity
$ips = @()
Foreach ($ip in $netcfg.IPAddress) { If ($ip) { $ips += $ip } }
Set-Attr $result.ansible_facts "ansible_ip_addresses" $ips
$psversion = $PSVersionTable.PSVersion.Major
Set-Attr $result.ansible_facts "ansible_powershell_version" $psversion
$winrm_https_listener_parent_path = Get-ChildItem -Path WSMan:\localhost\Listener -Recurse | Where-Object {$_.PSChildName -eq "Transport" -and $_.Value -eq "HTTPS"} | select PSParentPath
if ($winrm_https_listener_parent_path ) {
$winrm_https_listener_path = $winrm_https_listener_parent_path.PSParentPath.Substring($winrm_https_listener_parent_path.PSParentPath.LastIndexOf("\"))
}
if ($winrm_https_listener_path)
{
$https_listener = Get-ChildItem -Path "WSMan:\localhost\Listener$winrm_https_listener_path"
}
if ($https_listener)
{
$winrm_cert_thumbprint = $https_listener | where {$_.Name -EQ "CertificateThumbprint" } | select Value
}
if ($winrm_cert_thumbprint)
{
$uppercase_cert_thumbprint = $winrm_cert_thumbprint.Value.ToString().ToUpper()
}
$winrm_cert_expiry = Get-ChildItem -Path Cert:\LocalMachine\My | where Thumbprint -EQ $uppercase_cert_thumbprint | select NotAfter
if ($winrm_cert_expiry)
{
Set-Attr $result.ansible_facts "ansible_winrm_certificate_expires" $winrm_cert_expiry.NotAfter.ToString("yyyy-MM-dd HH:mm:ss")
}
Exit-Json $result;

@ -1,46 +0,0 @@
#!powershell
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# WANT_JSON
# POWERSHELL_COMMON
$params = Parse-Args $args;
$src = Get-Attr $params "src" (Get-Attr $params "path" $FALSE);
If (-not $src)
{
Fail-Json (New-Object psobject) "missing required argument: src";
}
If (Test-Path -PathType Leaf $src)
{
$bytes = [System.IO.File]::ReadAllBytes($src);
$content = [System.Convert]::ToBase64String($bytes);
$result = New-Object psobject @{
changed = $false
encoding = "base64"
content = $content
};
Exit-Json $result;
}
ElseIf (Test-Path -PathType Container $src)
{
Fail-Json (New-Object psobject) ("is a directory: " + $src);
}
Else
{
Fail-Json (New-Object psobject) ("file not found: " + $src);
}

@ -1,97 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Paul Durivage <paul.durivage@rackspace.com>, Trond Hindenes <trond@hindenes.com> and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
DOCUMENTATION = '''
---
module: win_feature
version_added: "1.7"
short_description: Installs and uninstalls Windows Features
description:
- Installs or uninstalls Windows Roles or Features
options:
name:
description:
- Names of roles or features to install as a single feature or a comma-separated list of features
required: true
default: null
aliases: []
state:
description:
- State of the features or roles on the system
required: false
choices:
- present
- absent
default: present
aliases: []
restart:
description:
- Restarts the computer automatically when installation is complete, if restarting is required by the roles or features installed.
choices:
- yes
- no
default: null
aliases: []
include_sub_features:
description:
- Adds all subfeatures of the specified feature
choices:
- yes
- no
default: null
aliases: []
include_management_tools:
description:
- Adds the corresponding management tools to the specified feature
choices:
- yes
- no
default: null
aliases: []
author: Paul Durivage / Trond Hindenes
'''
EXAMPLES = '''
# This installs IIS.
# The names of features available for install can be listed by running the following PowerShell command:
# PS C:\Users\Administrator> Import-Module ServerManager; Get-WindowsFeature
$ ansible -i hosts -m win_feature -a "name=Web-Server" all
$ ansible -i hosts -m win_feature -a "name=Web-Server,Web-Common-Http" all
# Playbook example
---
- name: Install IIS
hosts: all
gather_facts: false
tasks:
- name: Install IIS
win_feature:
name: "Web-Server"
      state: present
restart: yes
include_sub_features: yes
include_management_tools: yes
'''

@ -1,122 +0,0 @@
#!powershell
# This file is part of Ansible.
#
# Copyright 2014, Paul Durivage <paul.durivage@rackspace.com>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# WANT_JSON
# POWERSHELL_COMMON
Import-Module Servermanager;
$params = Parse-Args $args;
$result = New-Object psobject @{
changed = $false
}
If ($params.name) {
$name = $params.name
}
Else {
Fail-Json $result "mising required argument: name"
}
If ($params.state) {
$state = $params.state.ToString().ToLower()
If (($state -ne 'present') -and ($state -ne 'absent')) {
Fail-Json $result "state is '$state'; must be 'present' or 'absent'"
}
}
Elseif (!$params.state) {
$state = "present"
}
If ($params.restart) {
$restart = $params.restart | ConvertTo-Bool
}
Else
{
$restart = $false
}
if ($params.include_sub_features)
{
$includesubfeatures = $params.include_sub_features | ConvertTo-Bool
}
Else
{
$includesubfeatures = $false
}
if ($params.include_management_tools)
{
$includemanagementtools = $params.include_management_tools | ConvertTo-Bool
}
Else
{
$includemanagementtools = $false
}
If ($state -eq "present") {
try {
$featureresult = Add-WindowsFeature -Name $name -Restart:$restart -IncludeAllSubFeature:$includesubfeatures -IncludeManagementTools:$includemanagementtools
}
catch {
Fail-Json $result $_.Exception.Message
}
}
Elseif ($state -eq "absent") {
try {
$featureresult = Remove-WindowsFeature -Name $name -Restart:$restart
}
catch {
Fail-Json $result $_.Exception.Message
}
}
# Loop through results and create a hash containing details about
# each role/feature that is installed/removed
$installed_features = @()
#$featureresult.featureresult is filled if anything was changed
if ($featureresult.FeatureResult)
{
ForEach ($item in $featureresult.FeatureResult) {
$installed_features += New-Object psobject @{
id = $item.id.ToString()
display_name = $item.DisplayName
message = $item.Message.ToString()
restart_needed = $item.RestartNeeded.ToString()
skip_reason = $item.SkipReason.ToString()
success = $item.Success.ToString()
}
}
Set-Attr $result "feature_result" $installed_features
$result.changed = $true
}
Else
{
Set-Attr $result "feature_result" $null
}
Set-Attr $result "feature_success" $featureresult.Success.ToString()
Set-Attr $result "feature_exitcode" $featureresult.ExitCode.ToString()
Set-Attr $result "feature_restart_needed" $featureresult.RestartNeeded.ToString()
Exit-Json $result;

@ -1,57 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Paul Durivage <paul.durivage@rackspace.com>, and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
DOCUMENTATION = '''
---
module: win_get_url
version_added: "1.7"
short_description: Fetches a file from a given URL
description:
- Fetches a file from a URL and saves it locally
options:
url:
description:
- The full URL of a file to download
required: true
default: null
aliases: []
dest:
description:
- The absolute path of the location to save the file at the URL. Be sure to include a filename and extension as appropriate.
required: true
default: null
aliases: []
author: Paul Durivage
'''
EXAMPLES = '''
# Downloading a JPEG and saving it to a file with the ansible command.
# Note the "dest" is quoted rather instead of escaping the backslashes
$ ansible -i hosts -c winrm -m win_get_url -a "url=http://www.example.com/earthrise.jpg dest='C:\Users\Administrator\earthrise.jpg'" all
# Playbook example
- name: Download earthrise.jpg to 'C:\Users\RandomUser\earthrise.jpg'
win_get_url:
url: 'http://www.example.com/earthrise.jpg'
dest: 'C:\Users\RandomUser\earthrise.jpg'
'''

@ -1,56 +0,0 @@
#!powershell
# This file is part of Ansible.
#
# Copyright 2014, Paul Durivage <paul.durivage@rackspace.com>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# WANT_JSON
# POWERSHELL_COMMON
$params = Parse-Args $args;
$result = New-Object psobject @{
win_get_url = New-Object psobject
changed = $false
}
If ($params.url) {
$url = $params.url
}
Else {
Fail-Json $result "mising required argument: url"
}
If ($params.dest) {
$dest = $params.dest
}
Else {
Fail-Json $result "missing required argument: dest"
}
$client = New-Object System.Net.WebClient
Try {
$client.DownloadFile($url, $dest)
$result.changed = $true
}
Catch {
Fail-Json $result "Error downloading $url to $dest"
}
Set-Attr $result.win_get_url "url" $url
Set-Attr $result.win_get_url "dest" $dest
Exit-Json $result;

@ -1,67 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Chris Hoffman <choffman@chathamfinancial.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
DOCUMENTATION = '''
---
module: win_group
version_added: "1.7"
short_description: Add and remove local groups
description:
- Add and remove local groups
options:
name:
description:
- Name of the group
required: true
default: null
aliases: []
description:
description:
- Description of the group
required: false
default: null
aliases: []
state:
description:
- Create or remove the group
required: false
choices:
- present
- absent
default: present
aliases: []
author: Chris Hoffman
'''
EXAMPLES = '''
# Create a new group
win_group:
name: deploy
description: Deploy Group
state: present
# Remove a group
win_group:
name: deploy
state: absent
'''

@ -1,70 +0,0 @@
#!powershell
# This file is part of Ansible
#
# Copyright 2014, Chris Hoffman <choffman@chathamfinancial.com>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# WANT_JSON
# POWERSHELL_COMMON
$params = Parse-Args $args;
$result = New-Object PSObject;
Set-Attr $result "changed" $false;
If (-not $params.name.GetType) {
Fail-Json $result "missing required arguments: name"
}
If ($params.state) {
$state = $params.state.ToString().ToLower()
If (($state -ne "present") -and ($state -ne "absent")) {
Fail-Json $result "state is '$state'; must be 'present' or 'absent'"
}
}
Elseif (-not $params.state) {
$state = "present"
}
$adsi = [ADSI]"WinNT://$env:COMPUTERNAME"
$group = $adsi.Children | Where-Object {$_.SchemaClassName -eq 'group' -and $_.Name -eq $params.name }
try {
If ($state -eq "present") {
If (-not $group) {
$group = $adsi.Create("Group", $params.name)
$group.SetInfo()
Set-Attr $result "changed" $true
}
If ($params.description.GetType) {
IF (-not $group.description -or $group.description -ne $params.description) {
$group.description = $params.description
$group.SetInfo()
Set-Attr $result "changed" $true
}
}
}
ElseIf ($state -eq "absent" -and $group) {
$adsi.delete("Group", $group.Name.Value)
Set-Attr $result "changed" $true
}
}
catch {
Fail-Json $result $_.Exception.Message
}
Exit-Json $result

@ -1,58 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Matt Martz <matt@sivel.net>, and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
DOCUMENTATION = '''
---
module: win_msi
version_added: "1.7"
short_description: Installs and uninstalls Windows MSI files
description:
- Installs or uninstalls a Windows MSI file that is already located on the
target server
options:
path:
description:
- File system path to the MSI file to install
required: true
state:
description:
- Whether the MSI file should be installed or uninstalled
choices:
- present
- absent
default: present
creates:
description:
- Path to a file created by installing the MSI; if this file exists, the module
will not attempt to reinstall the package on every run
author: Matt Martz
'''
EXAMPLES = '''
# Install an MSI file
- win_msi: path=C:\\\\7z920-x64.msi
# Uninstall an MSI file
- win_msi: path=C:\\\\7z920-x64.msi state=absent
'''

@ -1,63 +0,0 @@
#!powershell
# (c) 2014, Matt Martz <matt@sivel.net>, and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# WANT_JSON
# POWERSHELL_COMMON
$params = Parse-Args $args;
$result = New-Object psobject;
Set-Attr $result "changed" $false;
If (-not $params.path.GetType)
{
Fail-Json $result "missing required arguments: path"
}
$extra_args = ""
If ($params.extra_args.GetType)
{
$extra_args = $params.extra_args;
}
If ($params.creates.GetType -and $params.state.GetType -and $params.state -ne "absent")
{
If (Test-Path $params.creates)
{
Exit-Json $result;
}
}
$logfile = [IO.Path]::GetTempFileName();
if ($params.state.GetType -and $params.state -eq "absent")
{
msiexec.exe /x $params.path /qb /l $logfile $extra_args;
}
Else
{
msiexec.exe /i $params.path /qb /l $logfile $extra_args;
}
Set-Attr $result "changed" $true;
$logcontents = Get-Content $logfile;
Remove-Item $logfile;
Set-Attr $result "log" $logcontents;
Exit-Json $result;

@ -1,48 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>, and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
DOCUMENTATION = '''
---
module: win_ping
version_added: "1.7"
short_description: A windows version of the classic ping module.
description:
- Checks management connectivity of a windows host
options:
data:
description:
- Alternate data to return instead of 'pong'
required: false
default: 'pong'
aliases: []
author: Chris Church
'''
EXAMPLES = '''
# Test connectivity to a windows host
ansible winserver -m win_ping
# Example from an Ansible Playbook
- action: win_ping
'''

@ -1,29 +0,0 @@
#!powershell
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# WANT_JSON
# POWERSHELL_COMMON
$params = Parse-Args $args;
$data = Get-Attr $params "data" "pong";
$result = New-Object psobject @{
changed = $false
ping = $data
};
Exit-Json $result;

@ -1,72 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Chris Hoffman <choffman@chathamfinancial.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
DOCUMENTATION = '''
---
module: win_service
version_added: "1.7"
short_description: Manages Windows services
description:
- Manages Windows services
options:
name:
description:
- Name of the service
required: true
default: null
aliases: []
start_mode:
description:
- Set the startup type for the service
required: false
choices:
- auto
- manual
- disabled
state:
description:
- C(started)/C(stopped) are idempotent actions that will not run
commands unless necessary. C(restarted) will always bounce the
service.
required: false
choices:
- started
- stopped
- restarted
default: null
aliases: []
author: Chris Hoffman
'''
EXAMPLES = '''
# Restart a service
win_service:
name: spooler
state: restarted
# Set service startup mode to auto and ensure it is started
win_service:
name: spooler
start_mode: auto
state: started
'''

@ -1,106 +0,0 @@
#!powershell
# This file is part of Ansible
#
# Copyright 2014, Chris Hoffman <choffman@chathamfinancial.com>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# WANT_JSON
# POWERSHELL_COMMON
$params = Parse-Args $args;
$result = New-Object PSObject;
Set-Attr $result "changed" $false;
If (-not $params.name)
{
Fail-Json $result "missing required arguments: name"
}
If ($params.state) {
$state = $params.state.ToString().ToLower()
If (($state -ne 'started') -and ($state -ne 'stopped') -and ($state -ne 'restarted')) {
Fail-Json $result "state is '$state'; must be 'started', 'stopped', or 'restarted'"
}
}
If ($params.start_mode) {
$startMode = $params.start_mode.ToString().ToLower()
If (($startMode -ne 'auto') -and ($startMode -ne 'manual') -and ($startMode -ne 'disabled')) {
Fail-Json $result "start mode is '$startMode'; must be 'auto', 'manual', or 'disabled'"
}
}
$svcName = $params.name
$svc = Get-Service -Name $svcName -ErrorAction SilentlyContinue
If (-not $svc) {
Fail-Json $result "Service '$svcName' not installed"
}
# Use service name instead of display name for remaining actions.
If ($svcName -ne $svc.ServiceName) {
$svcName = $svc.ServiceName
}
Set-Attr $result "name" $svc.ServiceName
Set-Attr $result "display_name" $svc.DisplayName
$svcMode = Get-WmiObject -Class Win32_Service -Property StartMode -Filter "Name='$svcName'"
If ($startMode) {
If ($svcMode.StartMode.ToLower() -ne $startMode) {
Set-Service -Name $svcName -StartupType $startMode
Set-Attr $result "changed" $true
Set-Attr $result "start_mode" $startMode
}
Else {
Set-Attr $result "start_mode" $svcMode.StartMode.ToLower()
}
}
Else {
Set-Attr $result "start_mode" $svcMode.StartMode.ToLower()
}
If ($state) {
If ($state -eq "started" -and $svc.Status -ne "Running") {
try {
Start-Service -Name $svcName -ErrorAction Stop
}
catch {
Fail-Json $result $_.Exception.Message
}
Set-Attr $result "changed" $true;
}
ElseIf ($state -eq "stopped" -and $svc.Status -ne "Stopped") {
try {
Stop-Service -Name $svcName -ErrorAction Stop
}
catch {
Fail-Json $result $_.Exception.Message
}
Set-Attr $result "changed" $true;
}
ElseIf ($state -eq "restarted") {
try {
Restart-Service -Name $svcName -ErrorAction Stop
}
catch {
Fail-Json $result $_.Exception.Message
}
Set-Attr $result "changed" $true;
}
}
$svc.Refresh()
Set-Attr $result "state" $svc.Status.ToString().ToLower()
Exit-Json $result;

@ -1,52 +0,0 @@
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub, actual code lives in the .ps1
# file of the same name
DOCUMENTATION = '''
---
module: win_stat
version_added: "1.7"
short_description: Returns information about a Windows file
description:
- Returns information about a Windows file
options:
path:
description:
- The full path of the file/object to get the facts of; both forward and
back slashes are accepted.
required: true
default: null
aliases: []
get_md5:
description:
- Whether to return the MD5 checksum of the file
required: false
default: yes
aliases: []
author: Chris Church
'''
EXAMPLES = '''
# Obtain information about a file
- win_stat: path=C:\\foo.ini
register: file_info
- debug: var=file_info
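# A hedged addition, not part of the original examples: skip the MD5 checksum
# via get_md5 and act only when the registered result reports the file exists.
- win_stat: path=C:\\foo.ini get_md5=no
  register: file_info
- debug: msg="foo.ini is present"
  when: file_info.stat.exists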
'''

@ -1,63 +0,0 @@
#!powershell
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# WANT_JSON
# POWERSHELL_COMMON
$params = Parse-Args $args;
$path = Get-Attr $params "path" $FALSE;
If ($path -eq $FALSE)
{
Fail-Json (New-Object psobject) "missing required argument: path";
}
$get_md5 = Get-Attr $params "get_md5" $TRUE | ConvertTo-Bool;
$result = New-Object psobject @{
stat = New-Object psobject
changed = $false
};
If (Test-Path $path)
{
Set-Attr $result.stat "exists" $TRUE;
$info = Get-Item $path;
If ($info.Directory) # Only files have the .Directory attribute.
{
Set-Attr $result.stat "isdir" $FALSE;
Set-Attr $result.stat "size" $info.Length;
}
Else
{
Set-Attr $result.stat "isdir" $TRUE;
}
}
Else
{
Set-Attr $result.stat "exists" $FALSE;
}
If ($get_md5 -and $result.stat.exists -and -not $result.stat.isdir)
{
$sp = new-object -TypeName System.Security.Cryptography.MD5CryptoServiceProvider;
$fp = [System.IO.File]::Open($path, [System.IO.Filemode]::Open, [System.IO.FileAccess]::Read);
$hash = [System.BitConverter]::ToString($sp.ComputeHash($fp)).Replace("-", "").ToLower();
$fp.Dispose();
Set-Attr $result.stat "md5" $hash;
}
Exit-Json $result;

@ -1,71 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Matt Martz <matt@sivel.net>, and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
DOCUMENTATION = '''
---
module: win_user
version_added: "1.7"
short_description: Manages local Windows user accounts
description:
- Manages local Windows user accounts
options:
name:
description:
- Username of the user to manage
required: true
default: null
aliases: []
password:
description:
- Password for the user (plain text)
required: true
default: null
aliases: []
state:
description:
- Whether to create or delete a user
required: false
choices:
- present
- absent
default: present
aliases: []
author: Paul Durivage
'''
EXAMPLES = '''
# Ad-hoc example
$ ansible -i hosts -m win_user -a "name=bob password=Password12345" all
$ ansible -i hosts -m win_user -a "name=bob password=Password12345 state=absent" all
# Playbook example
---
- name: Add a user
hosts: all
gather_facts: false
tasks:
- name: Add User
win_user:
name: ansible
password: "@ns1bl3"
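# A hedged addition, not part of the original examples: remove the same
# account again with state=absent, mirroring the ad-hoc removal above.
- name: Remove User
  win_user:
    name: ansible
    password: "@ns1bl3"
    state: absent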
'''

@ -1,116 +0,0 @@
#!powershell
# This file is part of Ansible
#
# Copyright 2014, Paul Durivage <paul.durivage@rackspace.com>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# WANT_JSON
# POWERSHELL_COMMON
########
$adsi = [ADSI]"WinNT://$env:COMPUTERNAME"
function Get-User($user) {
$adsi.Children | where {$_.SchemaClassName -eq 'user' -and $_.Name -eq $user }
return
}
function Create-User([string]$user, [string]$passwd) {
$adsiuser = $adsi.Create("User", $user)
$adsiuser.SetPassword($passwd)
$adsiuser.SetInfo()
$adsiuser
return
}
function Update-Password($user, [string]$passwd) {
$user.SetPassword($passwd)
$user.SetInfo()
}
function Delete-User($user) {
$adsi.delete("user", $user.Name.Value)
}
########
$params = Parse-Args $args;
$result = New-Object psobject @{
changed = $false
};
If (-not $params.name)
{
Fail-Json $result "missing required arguments: name"
}
If ($params.state) {
$state = $params.state.ToString().ToLower()
If (($state -ne 'present') -and ($state -ne 'absent')) {
Fail-Json $result "state is '$state'; must be 'present' or 'absent'"
}
}
Elseif (!$params.state) {
$state = "present"
}
If ((-not $params.password) -and ($state -eq 'present'))
{
Fail-Json $result "missing required arguments: password"
}
$username = Get-Attr $params "name"
$password = Get-Attr $params "password"
$user_obj = Get-User $username
if ($state -eq 'present') {
# Add or update user
try {
if ($user_obj) {
Update-Password $user_obj $password
}
else {
Create-User $username $password
}
$result.changed = $true
$user_obj = Get-User $username
}
catch {
Fail-Json $result $_.Exception.Message
}
}
else {
# Remove user
try {
if ($user_obj) {
Delete-User $user_obj
$result.changed = $true
}
else {
Set-Attr $result "msg" "User '$username' was not found"
}
}
catch {
Fail-Json $result $_.Exception.Message
}
}
# Set-Attr $result "user" $user_obj
Set-Attr $result "user_name" $user_obj.Name
Set-Attr $result "user_fullname" $user_obj.FullName
Set-Attr $result "user_path" $user_obj.Path
Exit-Json $result;