Allow conditional imports, see examples/playbook3.yml comments for a full explanation. Extensive
refactoring of playbooks now warranted, which we'll do before we move on. This variable assignment
system makes nearly all possible magic possible, for we can use these variables however we like,
even as module names!
pull/70/head
Michael DeHaan 13 years ago
parent 5ed2b894d9
commit 4de7bbb169
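
To put the commit message in concrete terms, the feature reads roughly like the sketch below, a condensed and purely illustrative version of the examples added in the diff that follows (the vars/ paths and the $packager/$apache variables are the ones those examples use):

    - hosts: all
      user: root
      vars_files:
        # always imported
        - "vars/external_vars.yml"
        # conditional import, first match wins: try the per-OS file (e.g.
        # vars/CentOS.yml when facter reports CentOS), otherwise fall back
        # to vars/defaults.yml, and fail if neither exists
        - [ "vars/$facter_operatingsystem.yml", "vars/defaults.yml" ]
      tasks:
        # $packager and $apache come from whichever vars file was imported,
        # so a variable can even select the module to run (yum vs. aptitude)
        - name: ensure apache is latest
          action: $packager pkg=$apache state=latest

The bracketed list is the conditional part: the first file that exists is loaded for that host and the rest of the list is skipped.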

@@ -52,6 +52,15 @@ class PlaybookCallbacks(object):
     def on_ok(self, host, host_result):
         print "ok: [%s]\n" % (host)
 
+    def on_setup_primary(self):
+        print "preparing nodes..."
+
+    def on_setup_secondary(self):
+        print "preparing conditional imports..."
+
+    def on_import_for_host(self, host, imported_file):
+        pass
+
     def on_play_start(self, pattern):
         print "PLAY [%s] ****************************\n" % pattern

@@ -1,26 +1,51 @@
 ---
-# this is not so much an example playbook file as a playbook we sometimes use
-# for testing. I have chosen to not comment this one so folks can get
-# an idea of what a concise playbook can look like...
+# this is a demo of conditional imports. This is a powerful concept
+# and can be used to use the same recipe for different types of hosts,
+# based on variables that bubble up from the hosts from tools such
+# as ohai or facter.
+#
+# Here's an example use case:
+#
+# what to do if the service for apache is named 'httpd' on CentOS
+# but is named 'apache' on Debian?
+
+# there is only one play in this playbook, it runs on all hosts
+# as root
 
 - hosts: all
   user: root
-  vars:
-    http_port: 80
-    max_clients: 200
+
+# we have a common list of variables stored in /vars/external_vars.yml
+# that we will always import
+
+# next, we want to import files that are different per operating system
+# and if no per operating system file is found, load a defaults file.
+# for instance, if the OS was "CentOS", we'd try to load vars/CentOS.yml.
+# if that was found, we would immediately stop. However if that wasn't
+# present, we'd try to load vars/defaults.yml. If that in turn was not
+# found, we would fail immediately, because we had gotten to the end of
+# the list without importing anything.
+
+  vars_files:
+  - "vars/external_vars.yml"
+  - [ "vars/$facter_operatingsystem.yml", "vars/defaults.yml" ]
+
+# and this is just a regular task line from a playbook, as we're used to.
+# but with variables in it that come from above. Note that the variables
+# from above are *also* available in templates
+
   tasks:
-  - name: simulate long running op, wait for 45s, poll every 5
-    action: command /bin/sleep 15
-    async: 45
-    poll: 5
-  - include: tasks/base.yml favcolor=blue
-  - name: write the foo config file using vars set above
-    action: template src=foo.j2 dest=/etc/some_random_foo.conf
-    notify:
-    - restart apache
+
+  - name: ensure apache is latest
+    action: $packager pkg=$apache state=latest
+
   - name: ensure apache is running
-    action: service name=httpd state=running
-  - name: pointless test action
-    action: command /bin/echo {{ http_port }}
-  handlers:
-  - include: handlers/handlers.yml
+    action: service name=$apache state=running
+
+  - name: template step
+    action: template src=/tmp/test.j2 dest=/tmp/test.out

@@ -0,0 +1,3 @@
+---
+apache: httpd
+packager: yum

@@ -0,0 +1,3 @@
+---
+packager: aptitude
+apache: apache

@@ -21,7 +21,6 @@ import ansible.runner
 import ansible.constants as C
 from ansible import utils
 from ansible import errors
-import yaml
 import shlex
 import os
 import time
@@ -89,15 +88,6 @@ class PlayBook(object):
         vars = play.get('vars', {})
         if type(vars) != dict:
             raise errors.AnsibleError("'vars' section must contain only key/value pairs")
-        vars_files = play.get('vars_files', [])
-        for f in vars_files:
-            path = utils.path_dwim(dirname, f)
-            # FIXME: better error handling if not valid YAML
-            # or file not found
-            # raise typed exception
-            data = file(path).read()
-            data = yaml.load(data)
-            vars.update(data)
         return vars
 
     def _include_tasks(self, play, task, dirname, new_tasks):
@@ -111,7 +101,7 @@ class PlayBook(object):
                 (k,v) = x.split("=")
                 inject_vars[k] = v
         included = utils.template_from_file(path, inject_vars)
-        included = yaml.load(included)
+        included = utils.parse_yaml(included)
         for x in included:
             new_tasks.append(x)
@@ -119,7 +109,7 @@ class PlayBook(object):
             path = utils.path_dwim(dirname, handler['include'])
             inject_vars = self._get_vars(play, dirname)
             included = utils.template_from_file(path, inject_vars)
-            included = yaml.load(included)
+            included = utils.parse_yaml(included)
             for x in included:
                 new_handlers.append(x)
@@ -127,7 +117,7 @@ class PlayBook(object):
         ''' load YAML file, including handling for imported files '''
 
         dirname = os.path.dirname(playbook)
-        playbook = yaml.load(file(playbook).read())
+        playbook = utils.parse_yaml_from_file(playbook)
 
         for play in playbook:
             tasks = play.get('tasks',[])
@@ -355,7 +345,7 @@ class PlayBook(object):
         # walk through the results and build up
         # summary information about successes and
-        # failures. TODO: split into subfunction
+        # failures. FIXME: TODO: split into subfunction!
 
         dark = results.get("dark", {})
         contacted = results.get("contacted", {})
@@ -422,22 +412,51 @@ class PlayBook(object):
             x['run'] = []
         x['run'].append(host)
 
-    def _run_play(self, pg):
-        '''
-        run a list of tasks for a given pattern, in order
-        '''
-
-        # get configuration information about the pattern
-        pattern = pg['hosts']
-        vars = self._get_vars(pg, self.basedir)
-        tasks = pg['tasks']
-        handlers = pg['handlers']
-        user = pg.get('user', C.DEFAULT_REMOTE_USER)
-        self.host_list, groups = ansible.runner.Runner.parse_hosts(self.host_list)
-
-        self.callbacks.on_play_start(pattern)
+    def _do_setup_step(self, pattern, vars, user, host_list, vars_files=None):
+        ''' push variables down to the systems and get variables+facts back up '''
+
+        # this enables conditional includes like $facter_os.yml and is only done
+        # after the original pass when we have that data.
+        #
+        # FIXME: refactor into subfunction
+        # FIXME: save parsed variable results in memory to avoid excessive re-reading/parsing
+        # FIXME: currently parses imports for hosts not in the pattern, that is not wrong, but it's
+        # not super optimized yet either, because we wouldn't have hit them, ergo
+        # it will raise false errors if there is no defaults variable file without any $vars
+        # in it, which could happen on uncontacted hosts.
+
+        if vars_files is not None:
+            self.callbacks.on_setup_secondary()
+            for host in host_list:
+                cache_vars = SETUP_CACHE.get(host,{})
+                SETUP_CACHE[host] = {}
+                for filename in vars_files:
+                    if type(filename) == list:
+                        # loop over all filenames, loading the first one, and failing if
+                        # none found
+                        found = False
+                        sequence = []
+                        for real_filename in filename:
+                            filename2 = utils.path_dwim(self.basedir, utils.template(real_filename, cache_vars))
+                            sequence.append(filename2)
+                            if os.path.exists(filename2):
+                                found = True
+                                data = utils.parse_yaml_from_file(filename2)
+                                SETUP_CACHE[host].update(data)
+                                self.callbacks.on_import_for_host(host, filename2)
+                                break
+                        if not found:
+                            raise errors.AnsibleError("no files matched for vars_files import sequence: %s" % sequence)
+                    else:
+                        filename2 = utils.path_dwim(self.basedir, utils.template(filename, cache_vars))
+                        if not os.path.exists(filename2):
+                            raise errors.AnsibleError("no file matched for vars_file import: %s" % filename2)
+                        data = utils.parse_yaml_from_file(filename2)
+                        SETUP_CACHE[host].update(data)
+                        self.callbacks.on_import_for_host(host, filename2)
+        else:
+            self.callbacks.on_setup_primary()
 
         # first run the setup task on every node, which gets the variables
         # written to the JSON file and will also bubble facts back up via
@@ -473,13 +492,62 @@ class PlayBook(object):
         # now for each result, load into the setup cache so we can
         # let runner template out future commands
         setup_ok = setup_results.get('contacted', {})
-        for (host, result) in setup_ok.iteritems():
-            SETUP_CACHE[host] = result
+        if vars_files is None:
+            # first pass only or we'll erase good work
+            for (host, result) in setup_ok.iteritems():
+                SETUP_CACHE[host] = result
+
+        host_list = self._prune_failed_hosts(host_list)
+        return host_list
+
+    def _run_play(self, pg):
+        '''
+        run a list of tasks for a given pattern, in order
+        '''
+
+        # get configuration information about the pattern
+        pattern = pg['hosts']
+        vars = self._get_vars(pg, self.basedir)
+        vars_files = pg.get('vars_files', {})
+        tasks = pg.get('tasks', [])
+        handlers = pg.get('handlers', [])
+        user = pg.get('user', C.DEFAULT_REMOTE_USER)
+        self.host_list, groups = ansible.runner.Runner.parse_hosts(self.host_list)
+
+        self.callbacks.on_play_start(pattern)
+
+        # push any variables down to the system # and get facts/ohai/other data back up
+        host_list = self._do_setup_step(pattern, vars, user, self.host_list, None)
+
+        # now with that data, handle conditional variable file imports!
+        if len(vars_files) > 0:
+            host_list = self._do_setup_step(pattern, vars, user, host_list, vars_files)
+
+        # FIXME: DUPLICATE CODE
+        # dark_hosts = setup_results.get('dark',{})
+        #contacted_hosts = setup_results.get('contacted',{})
+        #for (host, error) in dark_hosts.iteritems():
+        #    self.callbacks.on_dark_host(host, error)
+        #    self.dark[host] = 1
+        #for (host, host_result) in contacted_hosts.iteritems():
+        #    if 'failed' in host_result:
+        #        self.callbacks.on_failed(host, host_result)
+        #        self.failures[host] = 1
+        # FIXME: DUPLICATE CODE
+        # now for each result, load into the setup cache so we can
+        # let runner template out future commands
+        #setup_ok = setup_results.get('contacted', {})
+        #for (host, result) in setup_ok.iteritems():
+        #    SETUP_CACHE[host] = result
 
         # run all the top level tasks, these get run on every node
         for task in tasks:
             self._run_task(
                 pattern=pattern,
+                host_list=host_list,
                 task=task,
                 handlers=handlers,
                 remote_user=user

@@ -74,7 +74,7 @@ class Runner(object):
         remote_pass=C.DEFAULT_REMOTE_PASS,
         background=0,
         basedir=None,
-        setup_cache={},
+        setup_cache=None,
         transport='paramiko',
         verbose=False):
@@ -92,6 +92,8 @@ class Runner(object):
         setup_cache -- used only by playbook (complex explanation pending)
         '''
 
+        if setup_cache is None:
+            setup_cache = {}
         self.setup_cache = setup_cache
         self.host_list, self.groups = self.parse_hosts(host_list)
@@ -152,7 +154,7 @@ class Runner(object):
                 # looks like a group
                 group_name = item.replace("[","").replace("]","").lstrip().rstrip()
                 groups[group_name] = []
-            else:
+            elif item != "":
                 # looks like a regular host
                 groups[group_name].append(item)
                 results.append(item)
@@ -286,13 +288,14 @@ class Runner(object):
         args = module_args
         if type(args) == list:
-            args = [ str(x) for x in module_args ]
-            args = " ".join(args)
+            args = " ".join([ str(x) for x in module_args ])
 
+        # by default the args to substitute in the action line are those from the setup cache
         inject_vars = self.setup_cache.get(conn.host,{})
-        inject2 = {}
 
         # if the host file was an external script, execute it with the hostname
         # as a first parameter to get the variables to use for the host
+        inject2 = {}
         if Runner._external_variable_script is not None:
             host = conn.host
             cmd = subprocess.Popen([Runner._external_variable_script, host],
@@ -309,12 +312,15 @@ class Runner(object):
                     Runner._external_variable_script,
                     host
                 ))
 
-        # store injected variables in the templates
         inject_vars.update(inject2)
+
+        # store injected variables in the templates
         if self.module_name == 'setup':
-            for (k,v) in inject2.iteritems():
-                args = "%s %s=%s" % (args, k, v)
+            for (k,v) in inject_vars.iteritems():
+                if not k.startswith('facter_') and not k.startswith('ohai_'):
+                    if v.find(" ") != -1:
+                        v = "\"%s\"" % v
+                    args += " %s=%s" % (k, v)
 
         # the metadata location for the setup module is transparently managed
         # since it's an 'internals' module, kind of a black box. See playbook
@@ -338,7 +344,7 @@ class Runner(object):
     # *****************************************************
 
-    def _execute_normal_module(self, conn, host, tmp):
+    def _execute_normal_module(self, conn, host, tmp, module_name):
         '''
         transfer & execute a module that is not 'copy' or 'template'
         because those require extra work.
@@ -346,18 +352,19 @@ class Runner(object):
         # hack to make the 'shell' module keyword really be executed
         # by the command module
-        if self.module_name == 'shell':
-            self.module_name = 'command'
-            self.module_args.append("#USE_SHELL")
+        module_args = self.module_args
+        if module_name == 'shell':
+            module_name = 'command'
+            module_args.append("#USE_SHELL")
 
-        module = self._transfer_module(conn, tmp, self.module_name)
-        result = self._execute_module(conn, tmp, module, self.module_args)
+        module = self._transfer_module(conn, tmp, module_name)
+        result = self._execute_module(conn, tmp, module, module_args)
 
         # when running the setup module, which pushes vars to the host and ALSO
         # returns them (+factoids), store the variables that were returned such that commands
         # run AFTER setup use these variables for templating when executed
         # from playbooks
-        if self.module_name == 'setup':
+        if module_name == 'setup':
             host = conn.host
             try:
                 var_result = utils.parse_json(result)
@@ -377,7 +384,7 @@ class Runner(object):
     # *****************************************************
 
-    def _execute_async_module(self, conn, host, tmp):
+    def _execute_async_module(self, conn, host, tmp, module_name):
         '''
         transfer the given module name, plus the async module
         and then run the async module wrapping the other module
@@ -385,13 +392,14 @@ class Runner(object):
         # hack to make the 'shell' module keyword really be executed
         # by the command module
-        if self.module_name == 'shell':
-            self.module_name = 'command'
-            self.module_args.append("#USE_SHELL")
+        module_args = self.module_args
+        if module_name == 'shell':
+            module_name = 'command'
+            module_args.append("#USE_SHELL")
 
         async = self._transfer_module(conn, tmp, 'async_wrapper')
-        module = self._transfer_module(conn, tmp, self.module_name)
-        result = self._execute_module(conn, tmp, async, self.module_args,
+        module = self._transfer_module(conn, tmp, module_name)
+        result = self._execute_module(conn, tmp, async, module_args,
             async_module=module,
             async_jid=self.generated_jid,
             async_limit=self.background
@@ -518,17 +526,21 @@ class Runner(object):
         # or a request to use the copy or template
         # module, call the appropriate executor function
 
         ok, conn = self._connect(host)
         if not ok:
             return [ host, False, conn ]
 
+        cache = self.setup_cache.get(host, {})
+        module_name = utils.template(self.module_name, cache)
+
         tmp = self._get_tmp_path(conn)
         result = None
+
         if self.module_name not in [ 'copy', 'template' ]:
             if self.background == 0:
-                result = self._execute_normal_module(conn, host, tmp)
+                result = self._execute_normal_module(conn, host, tmp, module_name)
             else:
-                result = self._execute_async_module(conn, host, tmp)
+                result = self._execute_async_module(conn, host, tmp, module_name)
         elif self.module_name == 'copy':
             result = self._execute_copy(conn, host, tmp)

@@ -22,6 +22,7 @@ import os
 import shlex
 import re
 import jinja2
+import yaml
 
 try:
     import json
@@ -268,5 +269,11 @@ def template_from_file(path, vars):
     data = file(path).read()
     return template(data, vars)
 
+def parse_yaml(data):
+    return yaml.load(data)
+
+def parse_yaml_from_file(path):
+    data = file(path).read()
+    return parse_yaml(data)

@@ -0,0 +1,4 @@
+---
+# could test something different here but want people running tests on
+# different OS platforms to still have passing tests
+testing: default

@@ -26,6 +26,15 @@ class TestCallbacks(object):
     def on_start(self):
         self.events.append('start')
 
+    def on_setup_primary(self):
+        self.events.append([ 'primary_setup' ])
+
+    def on_setup_secondary(self):
+        self.events.append([ 'secondary_setup' ])
+
+    def on_import_for_host(self, host, filename):
+        self.events.append([ 'import', [ host, filename ]])
+
     def on_task_start(self, name, is_conditional):
         self.events.append([ 'task start', [ name, is_conditional ]])

@@ -0,0 +1,3 @@
+---
+duck: quack
+cow: moo

@@ -0,0 +1,2 @@
+---
+testing: default

@@ -7,6 +7,26 @@
             "all"
         ]
     ],
+    [
+        "primary_setup"
+    ],
+    [
+        "secondary_setup"
+    ],
+    [
+        "import",
+        [
+            "127.0.0.1",
+            "/home/mdehaan/ansible/test/common_vars.yml"
+        ]
+    ],
+    [
+        "import",
+        [
+            "127.0.0.1",
+            "/home/mdehaan/ansible/test/CentOS.yml"
+        ]
+    ],
     [
         "task start",
         [
@@ -68,6 +88,25 @@
             }
         ]
     ],
+    [
+        "task start",
+        [
+            "test vars_files imports",
+            false
+        ]
+    ],
+    [
+        "ok",
+        [
+            "127.0.0.1",
+            {
+                "cmd": "echo quack moo default ",
+                "rc": 0,
+                "stderr": "",
+                "stdout": "quack moo default"
+            }
+        ]
+    ],
     [
         "task start",
         [
@@ -196,7 +235,8 @@
         "changed": 2,
         "dark": 0,
         "failed": 0,
-        "resources": 8
+        "resources": 9
     }
   }
}

@@ -4,6 +4,9 @@
   vars:
     answer: "Wuh, I think so, Brain, but if we didn't have ears, we'd look like weasels."
     port: 5150
+  vars_files:
+    - common_vars.yml
+    - [ '$facter_operatingsystem.yml', 'default_os.yml' ]
   tasks:
@@ -16,6 +19,9 @@
   - name: test basic shell, plus two ways to dereference a variable
     action: shell echo $HOME $port {{ port }}
 
+  - name: test vars_files imports
+    action: shell echo $duck $cow $testing
+
   # in the command below, the test file should contain a valid template
   # and trigger the change handler

@@ -0,0 +1,71 @@
+---
+# this is an annotated example of some features available in playbooks
+# it shows how to make sure packages are updated, how to make sure
+# services are running, and how to template files. It also demos
+# change handlers that can restart things (or trigger other actions)
+# when resources change. For more advanced examples, see example2.yml
+
+# on all hosts, run as the user root...
+
+- hosts: all
+  user: root
+
+# make these variables available inside of templates
+# for when we use the 'template' action/module later on...
+
+  vars:
+    http_port: 80
+    max_clients: 200
+
+# define the tasks that are part of this play...
+
+  tasks:
+
+  # task #1 is to run an arbitrary command
+  # we'll simulate a long running task, wait for up to 45 seconds, poll every 5
+  # obviously this does nothing useful but you get the idea
+
+  - name: longrunner
+    action: command /bin/sleep 15
+    async: 45
+    poll: 5
+
+  # let's demo file operations.
+  #
+  # We can 'copy' files or 'template' them instead, using jinja2
+  # as the templating engine. This is done using the variables
+  # from the vars section above mixed in with variables bubbled up
+  # automatically from tools like facter and ohai. 'copy'
+  # works just like 'template' but does not do variable substitution.
+  #
+  # If and only if the file changes, restart apache at the very
+  # end of the playbook run
+
+  - name: write some_random_foo configuration
+    action: template src=templates/foo.j2 dest=/etc/some_random_foo.conf
+    notify:
+    - restart apache
+
+  # make sure httpd is installed at the latest version
+
+  - name: install httpd
+    action: yum pkg=httpd state=latest
+
+  # make sure httpd is running
+
+  - name: httpd start
+    action: service name=httpd state=running
+
+  # handlers are only run when things change, at the very end of each
+  # play. Let's define some. The names are significant and must
+  # match the 'notify' sections above
+
+  handlers:
+
+  # this particular handler is run when some_random_foo.conf
+  # is changed, and only then
+
+  - name: restart apache
+    action: service name=httpd state=restarted
