mirror of https://github.com/ansible/ansible.git
Transition inventory into plugins (#23001)
* draft new inventory plugin arch, yaml sample
  - split classes, moved out of init
  - extra debug statements
  - allow multiple inventory files
  - don't add hosts more than once
  - simplified host vars
  - since now we can have multiple, inventory_dir/file needs to be per host
  - ported yaml/script/ini/virtualbox plugins, dir is 'built in manager'
  - centralized localhost handling
  - added plugin docs
  - leaner meaner inventory (split to data + manager)
  - moved noop vars plugin
  - added 'postprocessing' inventory plugins
  - fixed ini plugin, better info on plugin run; group declarations can appear in any position relative to the children entry that contains them
  - group/host_vars loading as inventory plugin (postprocessing)
  - playbook_dir always full path
  - use bytes for file operations
  - better handling of empty/null sources
  - added test target that skips networking modules
  - var manager now loads play group/host_vars independently from inventory
  - centralized play setup repeat code
  - updated changelog with inv features
  - whitespace cleanup
  - fixed dataloader to new sig
  - made yaml plugin more resistant to bad data
  - nicer error msgs
  - fixed undeclared group detection
  - fixed 'ungrouping'
  - docs updated s/INI/file/ as it's not the only format
  - made behaviour of var merge a toggle
  - made 'source over group' path follow existing rule for var precedence
  - updated add_host/group from strategy
  - made host_list a plugin and added it to defaults
  - added advanced_host_list as example variation
  - refactored 'display' to be available by default in class inheritance
  - optimized implicit handling as per @pilou's feedback
  - removed unused code and tests
  - added inventory cache; vbox plugin now uses it
  - added _compose method for variable expressions in plugins
  - vbox plugin now uses 'compose'
  - require yaml extension for yaml
  - fix for plugin loader to always add original_path, even when not using all()
  - fix py3 issues
  - added --inventory as clearer option
  - return name when stringifying host objects
  - adjust checks to code moving

* reworked vars and vars precedence
  - vars plugins now load group/host_vars dirs
  - precedence for host vars is now configurable
  - vars_plugins have been reworked
  - removed unused vars cache
  - removed _gathered_facts as we are not keeping info in host anymore
  - cleaned up tests
  - fixed ansible-pull to work with new inventory
  - removed 'version added' notation to please rst check
  - inventory in config relative to config
  - ensures full paths on passed inventories

* implicit localhost connection local

pull/24952/head
parent 91a72ce7da
commit 8f97aef1a3
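The commit message above moves inventory parsing behind plugins and splits the inventory object into a data holder plus a manager. As a hedged illustration of the plugin shape this architecture leads to, the sketch below follows the inventory plugin API of released Ansible versions (BaseInventoryPlugin, verify_file(), parse()); the plugin name, file extension and hosts are made-up examples and are not part of this diff.

# Hypothetical minimal inventory plugin sketch (not part of this commit).
# The base class and method signatures follow the plugin API that shipped
# with this architecture; names like 'example_static' are made up.
from ansible.plugins.inventory import BaseInventoryPlugin


class InventoryModule(BaseInventoryPlugin):

    NAME = 'example_static'

    def verify_file(self, path):
        # only claim sources this plugin can plausibly parse
        return super(InventoryModule, self).verify_file(path) and path.endswith('.example')

    def parse(self, inventory, loader, path, cache=True):
        super(InventoryModule, self).parse(inventory, loader, path)
        # 'inventory' is backed by the InventoryData object added in this
        # commit: hosts and groups are created through its methods.
        inventory.add_group('web')
        inventory.add_host('web1.example.com', group='web', port=22)
        inventory.set_variable('web1.example.com', 'http_port', 8080)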
@@ -1,903 +0,0 @@
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
|
||||||
#
|
|
||||||
# This file is part of Ansible
|
|
||||||
#
|
|
||||||
# Ansible is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
#
|
|
||||||
# Ansible is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
#############################################
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
import fnmatch
|
|
||||||
import os
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import re
|
|
||||||
import itertools
|
|
||||||
|
|
||||||
|
|
||||||
from ansible import constants as C
|
|
||||||
from ansible.errors import AnsibleError
|
|
||||||
from ansible.inventory.dir import InventoryDirectory, get_file_parser
|
|
||||||
from ansible.inventory.group import Group
|
|
||||||
from ansible.inventory.host import Host
|
|
||||||
from ansible.module_utils.six import string_types, iteritems
|
|
||||||
from ansible.module_utils._text import to_bytes, to_text
|
|
||||||
from ansible.parsing.utils.addresses import parse_address
|
|
||||||
from ansible.plugins import vars_loader
|
|
||||||
from ansible.utils.vars import combine_vars
|
|
||||||
from ansible.utils.path import unfrackpath
|
|
||||||
|
|
||||||
try:
|
|
||||||
from __main__ import display
|
|
||||||
except ImportError:
|
|
||||||
from ansible.utils.display import Display
|
|
||||||
display = Display()
|
|
||||||
|
|
||||||
|
|
||||||
HOSTS_PATTERNS_CACHE = {}
|
|
||||||
|
|
||||||
|
|
||||||
class Inventory(object):
|
|
||||||
"""
|
|
||||||
Host inventory for ansible.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, loader, variable_manager, host_list=C.DEFAULT_HOST_LIST):
|
|
||||||
|
|
||||||
# the host file, or script path, or list of hosts
|
|
||||||
# if a list, inventory data will NOT be loaded
|
|
||||||
self.host_list = unfrackpath(host_list, follow=False)
|
|
||||||
self._loader = loader
|
|
||||||
self._variable_manager = variable_manager
|
|
||||||
self.localhost = None
|
|
||||||
|
|
||||||
# caching to avoid repeated calculations, particularly with
|
|
||||||
# external inventory scripts.
|
|
||||||
|
|
||||||
self._vars_per_host = {}
|
|
||||||
self._vars_per_group = {}
|
|
||||||
self._hosts_cache = {}
|
|
||||||
self._pattern_cache = {}
|
|
||||||
self._group_dict_cache = {}
|
|
||||||
self._vars_plugins = []
|
|
||||||
|
|
||||||
self._basedir = self.basedir()
|
|
||||||
|
|
||||||
# Contains set of filenames under group_vars directories
|
|
||||||
self._group_vars_files = self._find_group_vars_files(self._basedir)
|
|
||||||
self._host_vars_files = self._find_host_vars_files(self._basedir)
|
|
||||||
|
|
||||||
# to be set by calling set_playbook_basedir by playbook code
|
|
||||||
self._playbook_basedir = None
|
|
||||||
|
|
||||||
# the inventory object holds a list of groups
|
|
||||||
self.groups = {}
|
|
||||||
|
|
||||||
# a list of host(names) to contain current inquiries to
|
|
||||||
self._restriction = None
|
|
||||||
self._subset = None
|
|
||||||
|
|
||||||
# clear the cache here, which is only useful if more than
|
|
||||||
# one Inventory objects are created when using the API directly
|
|
||||||
self.clear_pattern_cache()
|
|
||||||
self.clear_group_dict_cache()
|
|
||||||
|
|
||||||
self.parse_inventory(host_list)
|
|
||||||
|
|
||||||
def serialize(self):
|
|
||||||
data = dict()
|
|
||||||
return data
|
|
||||||
|
|
||||||
def deserialize(self, data):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def parse_inventory(self, host_list):
|
|
||||||
|
|
||||||
if isinstance(host_list, string_types):
|
|
||||||
if "," in host_list:
|
|
||||||
host_list = host_list.split(",")
|
|
||||||
host_list = [ h for h in host_list if h and h.strip() ]
|
|
||||||
|
|
||||||
self.parser = None
|
|
||||||
|
|
||||||
# Always create the 'all' and 'ungrouped' groups, even if host_list is
|
|
||||||
# empty: in this case we will subsequently add the implicit 'localhost' to it.
|
|
||||||
|
|
||||||
ungrouped = Group('ungrouped')
|
|
||||||
all = Group('all')
|
|
||||||
all.add_child_group(ungrouped)
|
|
||||||
base_groups = frozenset([all, ungrouped])
|
|
||||||
|
|
||||||
self.groups = dict(all=all, ungrouped=ungrouped)
|
|
||||||
|
|
||||||
if host_list is None:
|
|
||||||
pass
|
|
||||||
elif isinstance(host_list, list):
|
|
||||||
for h in host_list:
|
|
||||||
try:
|
|
||||||
(host, port) = parse_address(h, allow_ranges=False)
|
|
||||||
except AnsibleError as e:
|
|
||||||
display.vvv("Unable to parse address from hostname, leaving unchanged: %s" % to_text(e))
|
|
||||||
host = h
|
|
||||||
port = None
|
|
||||||
|
|
||||||
new_host = Host(host, port)
|
|
||||||
if h in C.LOCALHOST:
|
|
||||||
# set default localhost from inventory to avoid creating an implicit one. Last localhost defined 'wins'.
|
|
||||||
if self.localhost is not None:
|
|
||||||
display.warning("A duplicate localhost-like entry was found (%s). First found localhost was %s" % (h, self.localhost.name))
|
|
||||||
display.vvvv("Set default localhost to %s" % h)
|
|
||||||
self.localhost = new_host
|
|
||||||
all.add_host(new_host)
|
|
||||||
elif self._loader.path_exists(host_list):
|
|
||||||
# TODO: switch this to a plugin loader and a 'condition' per plugin on which it should be tried, restoring 'inventory plugins'
|
|
||||||
if self.is_directory(host_list):
|
|
||||||
# Ensure basedir is inside the directory
|
|
||||||
host_list = os.path.join(self.host_list, "")
|
|
||||||
self.parser = InventoryDirectory(loader=self._loader, groups=self.groups, filename=host_list)
|
|
||||||
else:
|
|
||||||
self.parser = get_file_parser(host_list, self.groups, self._loader)
|
|
||||||
vars_loader.add_directory(self._basedir, with_subdir=True)
|
|
||||||
|
|
||||||
if not self.parser:
|
|
||||||
# should never happen, but JIC
|
|
||||||
raise AnsibleError("Unable to parse %s as an inventory source" % host_list)
|
|
||||||
else:
|
|
||||||
display.warning("Host file not found: %s" % to_text(host_list))
|
|
||||||
|
|
||||||
self._vars_plugins = [ x for x in vars_loader.all(self) ]
|
|
||||||
|
|
||||||
### POST PROCESS groups and hosts after specific parser was invoked
|
|
||||||
|
|
||||||
hosts = []
|
|
||||||
group_names = set()
|
|
||||||
# set group vars from group_vars/ files and vars plugins
|
|
||||||
for g in self.groups:
|
|
||||||
group = self.groups[g]
|
|
||||||
group.vars = combine_vars(group.vars, self.get_group_variables(group.name))
|
|
||||||
self.get_group_vars(group)
|
|
||||||
group_names.add(group.name)
|
|
||||||
hosts.extend(group.get_hosts())
|
|
||||||
|
|
||||||
host_names = set()
|
|
||||||
# get host vars from host_vars/ files and vars plugins
|
|
||||||
for host in hosts:
|
|
||||||
host.vars = combine_vars(host.vars, self.get_host_variables(host.name))
|
|
||||||
self.get_host_vars(host)
|
|
||||||
host_names.add(host.name)
|
|
||||||
|
|
||||||
mygroups = host.get_groups()
|
|
||||||
|
|
||||||
# ensure hosts are always in 'all'
|
|
||||||
if all not in mygroups:
|
|
||||||
all.add_host(host)
|
|
||||||
|
|
||||||
if ungrouped in mygroups:
|
|
||||||
# clear ungrouped of any hosts incorrectly stored there by the parser
|
|
||||||
if set(mygroups).difference(base_groups):
|
|
||||||
host.remove_group(ungrouped)
|
|
||||||
else:
|
|
||||||
# add ungrouped hosts to ungrouped
|
|
||||||
length = len(mygroups)
|
|
||||||
if length == 0 or (length == 1 and all in mygroups):
|
|
||||||
ungrouped.add_host(host)
|
|
||||||
|
|
||||||
# warn if overloading identifier as both group and host
|
|
||||||
for conflict in group_names.intersection(host_names):
|
|
||||||
display.warning("Found both group and host with same name: %s" % conflict)
|
|
||||||
|
|
||||||
def _match(self, str, pattern_str):
|
|
||||||
try:
|
|
||||||
if pattern_str.startswith('~'):
|
|
||||||
return re.search(pattern_str[1:], str)
|
|
||||||
else:
|
|
||||||
return fnmatch.fnmatch(str, pattern_str)
|
|
||||||
except Exception:
|
|
||||||
raise AnsibleError('invalid host pattern: %s' % pattern_str)
|
|
||||||
|
|
||||||
def _match_list(self, items, item_attr, pattern_str):
|
|
||||||
results = []
|
|
||||||
try:
|
|
||||||
if not pattern_str.startswith('~'):
|
|
||||||
pattern = re.compile(fnmatch.translate(pattern_str))
|
|
||||||
else:
|
|
||||||
pattern = re.compile(pattern_str[1:])
|
|
||||||
except Exception:
|
|
||||||
raise AnsibleError('invalid host pattern: %s' % pattern_str)
|
|
||||||
|
|
||||||
for item in items:
|
|
||||||
if pattern.match(getattr(item, item_attr)):
|
|
||||||
results.append(item)
|
|
||||||
return results
|
|
||||||
|
|
||||||
def get_hosts(self, pattern="all", ignore_limits=False, ignore_restrictions=False, order=None):
|
|
||||||
"""
|
|
||||||
Takes a pattern or list of patterns and returns a list of matching
|
|
||||||
inventory host names, taking into account any active restrictions
|
|
||||||
or applied subsets
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Check if pattern already computed
|
|
||||||
if isinstance(pattern, list):
|
|
||||||
pattern_hash = u":".join(pattern)
|
|
||||||
else:
|
|
||||||
pattern_hash = pattern
|
|
||||||
|
|
||||||
if not ignore_limits and self._subset:
|
|
||||||
pattern_hash += u":%s" % to_text(self._subset)
|
|
||||||
|
|
||||||
if not ignore_restrictions and self._restriction:
|
|
||||||
pattern_hash += u":%s" % to_text(self._restriction)
|
|
||||||
|
|
||||||
if pattern_hash not in HOSTS_PATTERNS_CACHE:
|
|
||||||
|
|
||||||
patterns = Inventory.split_host_pattern(pattern)
|
|
||||||
hosts = self._evaluate_patterns(patterns)
|
|
||||||
|
|
||||||
# mainly useful for hostvars[host] access
|
|
||||||
if not ignore_limits and self._subset:
|
|
||||||
# exclude hosts not in a subset, if defined
|
|
||||||
subset = self._evaluate_patterns(self._subset)
|
|
||||||
hosts = [ h for h in hosts if h in subset ]
|
|
||||||
|
|
||||||
if not ignore_restrictions and self._restriction:
|
|
||||||
# exclude hosts mentioned in any restriction (ex: failed hosts)
|
|
||||||
hosts = [ h for h in hosts if h.name in self._restriction ]
|
|
||||||
|
|
||||||
seen = set()
|
|
||||||
HOSTS_PATTERNS_CACHE[pattern_hash] = [x for x in hosts if x not in seen and not seen.add(x)]
|
|
||||||
|
|
||||||
# sort hosts list if needed (should only happen when called from strategy)
|
|
||||||
if order in ['sorted', 'reverse_sorted']:
|
|
||||||
from operator import attrgetter
|
|
||||||
hosts = sorted(HOSTS_PATTERNS_CACHE[pattern_hash][:], key=attrgetter('name'), reverse=(order == 'reverse_sorted'))
|
|
||||||
elif order == 'reverse_inventory':
|
|
||||||
hosts = sorted(HOSTS_PATTERNS_CACHE[pattern_hash][:], reverse=True)
|
|
||||||
else:
|
|
||||||
hosts = HOSTS_PATTERNS_CACHE[pattern_hash][:]
|
|
||||||
if order == 'shuffle':
|
|
||||||
from random import shuffle
|
|
||||||
shuffle(hosts)
|
|
||||||
elif order not in [None, 'inventory']:
|
|
||||||
AnsibleError("Invalid 'order' specified for inventory hosts: %s" % order)
|
|
||||||
|
|
||||||
return hosts
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def split_host_pattern(cls, pattern):
|
|
||||||
"""
|
|
||||||
Takes a string containing host patterns separated by commas (or a list
|
|
||||||
thereof) and returns a list of single patterns (which may not contain
|
|
||||||
commas). Whitespace is ignored.
|
|
||||||
|
|
||||||
Also accepts ':' as a separator for backwards compatibility, but it is
|
|
||||||
not recommended due to the conflict with IPv6 addresses and host ranges.
|
|
||||||
|
|
||||||
Example: 'a,b[1], c[2:3] , d' -> ['a', 'b[1]', 'c[2:3]', 'd']
|
|
||||||
"""
|
|
||||||
|
|
||||||
if isinstance(pattern, list):
|
|
||||||
return list(itertools.chain(*map(cls.split_host_pattern, pattern)))
|
|
||||||
|
|
||||||
# If it's got commas in it, we'll treat it as a straightforward
|
|
||||||
# comma-separated list of patterns.
|
|
||||||
|
|
||||||
elif ',' in pattern:
|
|
||||||
patterns = re.split(r'\s*,\s*', pattern)
|
|
||||||
|
|
||||||
# If it doesn't, it could still be a single pattern. This accounts for
|
|
||||||
# non-separator uses of colons: IPv6 addresses and [x:y] host ranges.
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
(base, port) = parse_address(pattern, allow_ranges=True)
|
|
||||||
patterns = [pattern]
|
|
||||||
except:
|
|
||||||
# The only other case we accept is a ':'-separated list of patterns.
|
|
||||||
# This mishandles IPv6 addresses, and is retained only for backwards
|
|
||||||
# compatibility.
|
|
||||||
patterns = re.findall(
|
|
||||||
r'''(?: # We want to match something comprising:
|
|
||||||
[^\s:\[\]] # (anything other than whitespace or ':[]'
|
|
||||||
| # ...or...
|
|
||||||
\[[^\]]*\] # a single complete bracketed expression)
|
|
||||||
)+ # occurring once or more
|
|
||||||
''', pattern, re.X
|
|
||||||
)
|
|
||||||
|
|
||||||
return [p.strip() for p in patterns]
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def order_patterns(cls, patterns):
|
|
||||||
|
|
||||||
# Host specifiers should be sorted to ensure consistent behavior
|
|
||||||
pattern_regular = []
|
|
||||||
pattern_intersection = []
|
|
||||||
pattern_exclude = []
|
|
||||||
for p in patterns:
|
|
||||||
if p.startswith("!"):
|
|
||||||
pattern_exclude.append(p)
|
|
||||||
elif p.startswith("&"):
|
|
||||||
pattern_intersection.append(p)
|
|
||||||
elif p:
|
|
||||||
pattern_regular.append(p)
|
|
||||||
|
|
||||||
# if no regular pattern was given, hence only exclude and/or intersection
|
|
||||||
# make that magically work
|
|
||||||
if pattern_regular == []:
|
|
||||||
pattern_regular = ['all']
|
|
||||||
|
|
||||||
# when applying the host selectors, run those without the "&" or "!"
|
|
||||||
# first, then the &s, then the !s.
|
|
||||||
return pattern_regular + pattern_intersection + pattern_exclude
|
|
||||||
|
|
||||||
def _evaluate_patterns(self, patterns):
|
|
||||||
"""
|
|
||||||
Takes a list of patterns and returns a list of matching host names,
|
|
||||||
taking into account any negative and intersection patterns.
|
|
||||||
"""
|
|
||||||
|
|
||||||
patterns = Inventory.order_patterns(patterns)
|
|
||||||
hosts = []
|
|
||||||
|
|
||||||
for p in patterns:
|
|
||||||
# avoid resolving a pattern that is a plain host
|
|
||||||
if p in self._hosts_cache:
|
|
||||||
hosts.append(self.get_host(p))
|
|
||||||
else:
|
|
||||||
that = self._match_one_pattern(p)
|
|
||||||
if p.startswith("!"):
|
|
||||||
hosts = [ h for h in hosts if h not in that ]
|
|
||||||
elif p.startswith("&"):
|
|
||||||
hosts = [ h for h in hosts if h in that ]
|
|
||||||
else:
|
|
||||||
to_append = [ h for h in that if h.name not in [ y.name for y in hosts ] ]
|
|
||||||
hosts.extend(to_append)
|
|
||||||
return hosts
|
|
||||||
|
|
||||||
def _match_one_pattern(self, pattern):
|
|
||||||
"""
|
|
||||||
Takes a single pattern and returns a list of matching host names.
|
|
||||||
Ignores intersection (&) and exclusion (!) specifiers.
|
|
||||||
|
|
||||||
The pattern may be:
|
|
||||||
|
|
||||||
1. A regex starting with ~, e.g. '~[abc]*'
|
|
||||||
2. A shell glob pattern with ?/*/[chars]/[!chars], e.g. 'foo*'
|
|
||||||
3. An ordinary word that matches itself only, e.g. 'foo'
|
|
||||||
|
|
||||||
The pattern is matched using the following rules:
|
|
||||||
|
|
||||||
1. If it's 'all', it matches all hosts in all groups.
|
|
||||||
2. Otherwise, for each known group name:
|
|
||||||
(a) if it matches the group name, the results include all hosts
|
|
||||||
in the group or any of its children.
|
|
||||||
(b) otherwise, if it matches any hosts in the group, the results
|
|
||||||
include the matching hosts.
|
|
||||||
|
|
||||||
This means that 'foo*' may match one or more groups (thus including all
|
|
||||||
hosts therein) but also hosts in other groups.
|
|
||||||
|
|
||||||
The built-in groups 'all' and 'ungrouped' are special. No pattern can
|
|
||||||
match these group names (though 'all' behaves as though it matches, as
|
|
||||||
described above). The word 'ungrouped' can match a host of that name,
|
|
||||||
and patterns like 'ungr*' and 'al*' can match either hosts or groups
|
|
||||||
other than all and ungrouped.
|
|
||||||
|
|
||||||
If the pattern matches one or more group names according to these rules,
|
|
||||||
it may have an optional range suffix to select a subset of the results.
|
|
||||||
This is allowed only if the pattern is not a regex, i.e. '~foo[1]' does
|
|
||||||
not work (the [1] is interpreted as part of the regex), but 'foo*[1]'
|
|
||||||
would work if 'foo*' matched the name of one or more groups.
|
|
||||||
|
|
||||||
Duplicate matches are always eliminated from the results.
|
|
||||||
"""
|
|
||||||
|
|
||||||
if pattern.startswith("&") or pattern.startswith("!"):
|
|
||||||
pattern = pattern[1:]
|
|
||||||
|
|
||||||
if pattern not in self._pattern_cache:
|
|
||||||
(expr, slice) = self._split_subscript(pattern)
|
|
||||||
hosts = self._enumerate_matches(expr)
|
|
||||||
try:
|
|
||||||
hosts = self._apply_subscript(hosts, slice)
|
|
||||||
except IndexError:
|
|
||||||
raise AnsibleError("No hosts matched the subscripted pattern '%s'" % pattern)
|
|
||||||
self._pattern_cache[pattern] = hosts
|
|
||||||
|
|
||||||
return self._pattern_cache[pattern]
|
|
||||||
|
|
||||||
def _split_subscript(self, pattern):
|
|
||||||
"""
|
|
||||||
Takes a pattern, checks if it has a subscript, and returns the pattern
|
|
||||||
without the subscript and a (start,end) tuple representing the given
|
|
||||||
subscript (or None if there is no subscript).
|
|
||||||
|
|
||||||
Validates that the subscript is in the right syntax, but doesn't make
|
|
||||||
sure the actual indices make sense in context.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Do not parse regexes for enumeration info
|
|
||||||
if pattern.startswith('~'):
|
|
||||||
return (pattern, None)
|
|
||||||
|
|
||||||
# We want a pattern followed by an integer or range subscript.
|
|
||||||
# (We can't be more restrictive about the expression because the
|
|
||||||
# fnmatch semantics permit [\[:\]] to occur.)
|
|
||||||
|
|
||||||
pattern_with_subscript = re.compile(
|
|
||||||
r'''^
|
|
||||||
(.+) # A pattern expression ending with...
|
|
||||||
\[(?: # A [subscript] expression comprising:
|
|
||||||
(-?[0-9]+)| # A single positive or negative number
|
|
||||||
([0-9]+)([:-]) # Or an x:y or x: range.
|
|
||||||
([0-9]*)
|
|
||||||
)\]
|
|
||||||
$
|
|
||||||
''', re.X
|
|
||||||
)
|
|
||||||
|
|
||||||
subscript = None
|
|
||||||
m = pattern_with_subscript.match(pattern)
|
|
||||||
if m:
|
|
||||||
(pattern, idx, start, sep, end) = m.groups()
|
|
||||||
if idx:
|
|
||||||
subscript = (int(idx), None)
|
|
||||||
else:
|
|
||||||
if not end:
|
|
||||||
end = -1
|
|
||||||
subscript = (int(start), int(end))
|
|
||||||
if sep == '-':
|
|
||||||
display.warning("Use [x:y] inclusive subscripts instead of [x-y] which has been removed")
|
|
||||||
|
|
||||||
return (pattern, subscript)
|
|
||||||
|
|
||||||
def _apply_subscript(self, hosts, subscript):
|
|
||||||
"""
|
|
||||||
Takes a list of hosts and a (start,end) tuple and returns the subset of
|
|
||||||
hosts based on the subscript (which may be None to return all hosts).
|
|
||||||
"""
|
|
||||||
|
|
||||||
if not hosts or not subscript:
|
|
||||||
return hosts
|
|
||||||
|
|
||||||
(start, end) = subscript
|
|
||||||
|
|
||||||
if end:
|
|
||||||
if end == -1:
|
|
||||||
end = len(hosts)-1
|
|
||||||
return hosts[start:end+1]
|
|
||||||
else:
|
|
||||||
return [ hosts[start] ]
|
|
||||||
|
|
||||||
def _enumerate_matches(self, pattern):
|
|
||||||
"""
|
|
||||||
Returns a list of host names matching the given pattern according to the
|
|
||||||
rules explained above in _match_one_pattern.
|
|
||||||
"""
|
|
||||||
|
|
||||||
results = []
|
|
||||||
|
|
||||||
def __append_host_to_results(host):
|
|
||||||
if host.name not in results:
|
|
||||||
if not host.implicit:
|
|
||||||
results.append(host)
|
|
||||||
|
|
||||||
groups = self.get_groups()
|
|
||||||
matched = False
|
|
||||||
for group in groups.values():
|
|
||||||
if self._match(group.name, pattern):
|
|
||||||
matched = True
|
|
||||||
for host in group.get_hosts():
|
|
||||||
__append_host_to_results(host)
|
|
||||||
else:
|
|
||||||
matching_hosts = self._match_list(group.get_hosts(), 'name', pattern)
|
|
||||||
if matching_hosts:
|
|
||||||
matched = True
|
|
||||||
for host in matching_hosts:
|
|
||||||
__append_host_to_results(host)
|
|
||||||
|
|
||||||
if pattern in C.LOCALHOST and len(results) == 0:
|
|
||||||
new_host = self._create_implicit_localhost(pattern)
|
|
||||||
results.append(new_host)
|
|
||||||
matched = True
|
|
||||||
|
|
||||||
if not matched:
|
|
||||||
display.warning("Could not match supplied host pattern, ignoring: %s" % pattern)
|
|
||||||
return results
|
|
||||||
|
|
||||||
def _create_implicit_localhost(self, pattern):
|
|
||||||
|
|
||||||
if self.localhost:
|
|
||||||
new_host = self.localhost
|
|
||||||
else:
|
|
||||||
new_host = Host(pattern)
|
|
||||||
new_host.address = "127.0.0.1"
|
|
||||||
new_host.implicit = True
|
|
||||||
new_host.vars = self.get_host_vars(new_host)
|
|
||||||
new_host.set_variable("ansible_connection", "local")
|
|
||||||
if "ansible_python_interpreter" not in new_host.vars:
|
|
||||||
py_interp = sys.executable
|
|
||||||
if not py_interp:
|
|
||||||
# sys.executable is not set in some cornercases. #13585
|
|
||||||
display.warning('Unable to determine python interpreter from sys.executable. Using /usr/bin/python default.'
|
|
||||||
' You can correct this by setting ansible_python_interpreter for localhost')
|
|
||||||
py_interp = '/usr/bin/python'
|
|
||||||
new_host.set_variable("ansible_python_interpreter", py_interp)
|
|
||||||
self.get_group("ungrouped").add_host(new_host)
|
|
||||||
self.localhost = new_host
|
|
||||||
return new_host
|
|
||||||
|
|
||||||
def clear_pattern_cache(self):
|
|
||||||
''' called exclusively by the add_host plugin to allow patterns to be recalculated '''
|
|
||||||
global HOSTS_PATTERNS_CACHE
|
|
||||||
HOSTS_PATTERNS_CACHE = {}
|
|
||||||
self._pattern_cache = {}
|
|
||||||
|
|
||||||
def clear_group_dict_cache(self):
|
|
||||||
''' called exclusively by the add_host and group_by plugins '''
|
|
||||||
self._group_dict_cache = {}
|
|
||||||
|
|
||||||
def groups_for_host(self, host):
|
|
||||||
if host in self._hosts_cache:
|
|
||||||
return self._hosts_cache[host].get_groups()
|
|
||||||
else:
|
|
||||||
return []
|
|
||||||
|
|
||||||
def get_groups(self):
|
|
||||||
return self.groups
|
|
||||||
|
|
||||||
def get_host(self, hostname):
|
|
||||||
if hostname not in self._hosts_cache:
|
|
||||||
self._hosts_cache[hostname] = self._get_host(hostname)
|
|
||||||
return self._hosts_cache[hostname]
|
|
||||||
|
|
||||||
def _get_host(self, hostname):
|
|
||||||
matching_host = None
|
|
||||||
if hostname in C.LOCALHOST:
|
|
||||||
if self.localhost:
|
|
||||||
matching_host = self.localhost
|
|
||||||
else:
|
|
||||||
for host in self.get_group('all').get_hosts():
|
|
||||||
if host.name in C.LOCALHOST:
|
|
||||||
matching_host = host
|
|
||||||
break
|
|
||||||
if not matching_host:
|
|
||||||
matching_host = self._create_implicit_localhost(hostname)
|
|
||||||
# update caches
|
|
||||||
self._hosts_cache[hostname] = matching_host
|
|
||||||
for host in C.LOCALHOST.difference((hostname,)):
|
|
||||||
self._hosts_cache[host] = self._hosts_cache[hostname]
|
|
||||||
else:
|
|
||||||
for group in self.groups.values():
|
|
||||||
for host in group.get_hosts():
|
|
||||||
if host not in self._hosts_cache:
|
|
||||||
self._hosts_cache[host.name] = host
|
|
||||||
if hostname == host.name:
|
|
||||||
matching_host = host
|
|
||||||
return matching_host
|
|
||||||
|
|
||||||
def get_group(self, groupname):
|
|
||||||
return self.groups.get(groupname)
|
|
||||||
|
|
||||||
def get_group_variables(self, groupname, update_cached=False, vault_password=None):
|
|
||||||
if groupname not in self._vars_per_group or update_cached:
|
|
||||||
self._vars_per_group[groupname] = self._get_group_variables(groupname, vault_password=vault_password)
|
|
||||||
return self._vars_per_group[groupname]
|
|
||||||
|
|
||||||
def _get_group_variables(self, groupname, vault_password=None):
|
|
||||||
|
|
||||||
group = self.get_group(groupname)
|
|
||||||
if group is None:
|
|
||||||
raise Exception("group not found: %s" % groupname)
|
|
||||||
|
|
||||||
vars = {}
|
|
||||||
|
|
||||||
# plugin.get_group_vars retrieves just vars for specific group
|
|
||||||
vars_results = [ plugin.get_group_vars(group, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'get_group_vars')]
|
|
||||||
for updated in vars_results:
|
|
||||||
if updated is not None:
|
|
||||||
vars = combine_vars(vars, updated)
|
|
||||||
|
|
||||||
# Read group_vars/ files
|
|
||||||
vars = combine_vars(vars, self.get_group_vars(group))
|
|
||||||
|
|
||||||
return vars
|
|
||||||
|
|
||||||
def get_group_dict(self):
|
|
||||||
"""
|
|
||||||
In get_vars() we merge a 'magic' dictionary 'groups' with group name
|
|
||||||
keys and hostname list values into every host variable set.
|
|
||||||
|
|
||||||
Cache the creation of this structure here
|
|
||||||
"""
|
|
||||||
|
|
||||||
if not self._group_dict_cache:
|
|
||||||
for (group_name, group) in iteritems(self.groups):
|
|
||||||
self._group_dict_cache[group_name] = [h.name for h in group.get_hosts()]
|
|
||||||
|
|
||||||
return self._group_dict_cache
|
|
||||||
|
|
||||||
def get_vars(self, hostname, update_cached=False, vault_password=None):
|
|
||||||
|
|
||||||
host = self.get_host(hostname)
|
|
||||||
if not host:
|
|
||||||
raise AnsibleError("no vars as host is not in inventory: %s" % hostname)
|
|
||||||
return host.get_vars()
|
|
||||||
|
|
||||||
def get_host_variables(self, hostname, update_cached=False, vault_password=None):
|
|
||||||
|
|
||||||
if hostname not in self._vars_per_host or update_cached:
|
|
||||||
self._vars_per_host[hostname] = self._get_host_variables(hostname, vault_password=vault_password)
|
|
||||||
return self._vars_per_host[hostname]
|
|
||||||
|
|
||||||
def _get_host_variables(self, hostname, vault_password=None):
|
|
||||||
|
|
||||||
host = self.get_host(hostname)
|
|
||||||
if host is None:
|
|
||||||
raise AnsibleError("no host vars as host is not in inventory: %s" % hostname)
|
|
||||||
|
|
||||||
vars = {}
|
|
||||||
|
|
||||||
# plugin.run retrieves all vars (also from groups) for host
|
|
||||||
vars_results = [ plugin.run(host, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'run')]
|
|
||||||
for updated in vars_results:
|
|
||||||
if updated is not None:
|
|
||||||
vars = combine_vars(vars, updated)
|
|
||||||
|
|
||||||
# plugin.get_host_vars retrieves just vars for specific host
|
|
||||||
vars_results = [ plugin.get_host_vars(host, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'get_host_vars')]
|
|
||||||
for updated in vars_results:
|
|
||||||
if updated is not None:
|
|
||||||
vars = combine_vars(vars, updated)
|
|
||||||
|
|
||||||
# still need to check InventoryParser per host vars
|
|
||||||
# which actually means InventoryScript per host,
|
|
||||||
# which is not performant
|
|
||||||
if self.parser is not None:
|
|
||||||
vars = combine_vars(vars, self.parser.get_host_variables(host))
|
|
||||||
|
|
||||||
return vars
|
|
||||||
|
|
||||||
def add_group(self, group):
|
|
||||||
if group.name not in self.groups:
|
|
||||||
self.groups[group.name] = group
|
|
||||||
else:
|
|
||||||
raise AnsibleError("group already in inventory: %s" % group.name)
|
|
||||||
|
|
||||||
def list_hosts(self, pattern="all"):
|
|
||||||
|
|
||||||
""" return a list of hostnames for a pattern """
|
|
||||||
|
|
||||||
result = [ h for h in self.get_hosts(pattern) ]
|
|
||||||
if len(result) == 0 and pattern in C.LOCALHOST:
|
|
||||||
result = [pattern]
|
|
||||||
return result
|
|
||||||
|
|
||||||
def list_groups(self):
|
|
||||||
return sorted(self.groups.keys(), key=lambda x: x)
|
|
||||||
|
|
||||||
def restrict_to_hosts(self, restriction):
|
|
||||||
"""
|
|
||||||
Restrict list operations to the hosts given in restriction. This is used
|
|
||||||
to batch serial operations in main playbook code, don't use this for other
|
|
||||||
reasons.
|
|
||||||
"""
|
|
||||||
if restriction is None:
|
|
||||||
return
|
|
||||||
elif not isinstance(restriction, list):
|
|
||||||
restriction = [ restriction ]
|
|
||||||
self._restriction = [ h.name for h in restriction ]
|
|
||||||
|
|
||||||
def subset(self, subset_pattern):
|
|
||||||
"""
|
|
||||||
Limits inventory results to a subset of inventory that matches a given
|
|
||||||
pattern, such as to select a given geographic or numeric slice amongst
|
|
||||||
a previous 'hosts' selection that only selects roles, or vice versa.
|
|
||||||
Corresponds to --limit parameter to ansible-playbook
|
|
||||||
"""
|
|
||||||
if subset_pattern is None:
|
|
||||||
self._subset = None
|
|
||||||
else:
|
|
||||||
subset_patterns = Inventory.split_host_pattern(subset_pattern)
|
|
||||||
results = []
|
|
||||||
# allow Unix style @filename data
|
|
||||||
for x in subset_patterns:
|
|
||||||
if x.startswith("@"):
|
|
||||||
fd = open(x[1:])
|
|
||||||
results.extend(fd.read().split("\n"))
|
|
||||||
fd.close()
|
|
||||||
else:
|
|
||||||
results.append(x)
|
|
||||||
self._subset = results
|
|
||||||
|
|
||||||
def remove_restriction(self):
|
|
||||||
""" Do not restrict list operations """
|
|
||||||
self._restriction = None
|
|
||||||
|
|
||||||
def is_file(self):
|
|
||||||
"""
|
|
||||||
Did inventory come from a file? We don't use the equivalent loader
|
|
||||||
methods in inventory, due to the fact that the loader does an implicit
|
|
||||||
DWIM on the path, which may be incorrect for inventory paths relative
|
|
||||||
to the playbook basedir.
|
|
||||||
"""
|
|
||||||
if not isinstance(self.host_list, string_types):
|
|
||||||
return False
|
|
||||||
return os.path.isfile(self.host_list) or self.host_list == os.devnull
|
|
||||||
|
|
||||||
def is_directory(self, path):
|
|
||||||
"""
|
|
||||||
Is the inventory host list a directory? Same caveat for here as with
|
|
||||||
the is_file() method above.
|
|
||||||
"""
|
|
||||||
if not isinstance(self.host_list, string_types):
|
|
||||||
return False
|
|
||||||
return os.path.isdir(path)
|
|
||||||
|
|
||||||
def basedir(self):
|
|
||||||
""" if inventory came from a file, what's the directory? """
|
|
||||||
dname = self.host_list
|
|
||||||
if self.is_directory(self.host_list):
|
|
||||||
dname = self.host_list
|
|
||||||
elif not self.is_file():
|
|
||||||
dname = None
|
|
||||||
else:
|
|
||||||
dname = os.path.dirname(self.host_list)
|
|
||||||
if dname is None or dname == '' or dname == '.':
|
|
||||||
dname = os.getcwd()
|
|
||||||
if dname:
|
|
||||||
dname = os.path.abspath(dname)
|
|
||||||
return dname
|
|
||||||
|
|
||||||
def src(self):
|
|
||||||
""" if inventory came from a file, what's the directory and file name? """
|
|
||||||
if not self.is_file():
|
|
||||||
return None
|
|
||||||
return self.host_list
|
|
||||||
|
|
||||||
def playbook_basedir(self):
|
|
||||||
""" returns the directory of the current playbook """
|
|
||||||
return self._playbook_basedir
|
|
||||||
|
|
||||||
def set_playbook_basedir(self, dir_name):
|
|
||||||
"""
|
|
||||||
sets the base directory of the playbook so inventory can use it as a
|
|
||||||
basedir for host_ and group_vars, and other things.
|
|
||||||
"""
|
|
||||||
# Only update things if dir is a different playbook basedir
|
|
||||||
if dir_name != self._playbook_basedir:
|
|
||||||
# we're changing the playbook basedir, so if we had set one previously
|
|
||||||
# clear the host/group vars entries from the VariableManager so they're
|
|
||||||
# not incorrectly used by playbooks from different directories
|
|
||||||
if self._playbook_basedir:
|
|
||||||
self._variable_manager.clear_playbook_hostgroup_vars_files(self._playbook_basedir)
|
|
||||||
|
|
||||||
self._playbook_basedir = dir_name
|
|
||||||
# get group vars from group_vars/ files
|
|
||||||
# TODO: excluding the new_pb_basedir directory may result in group_vars
|
|
||||||
# files loading more than they should, however with the file caching
|
|
||||||
# we do this shouldn't be too much of an issue. Still, this should
|
|
||||||
# be fixed at some point to allow a "first load" to touch all of the
|
|
||||||
# directories, then later runs only touch the new basedir specified
|
|
||||||
found_group_vars = self._find_group_vars_files(self._playbook_basedir)
|
|
||||||
if found_group_vars:
|
|
||||||
self._group_vars_files = self._group_vars_files.union(found_group_vars)
|
|
||||||
for group in self.groups.values():
|
|
||||||
self.get_group_vars(group)
|
|
||||||
|
|
||||||
found_host_vars = self._find_host_vars_files(self._playbook_basedir)
|
|
||||||
if found_host_vars:
|
|
||||||
self._host_vars_files = self._host_vars_files.union(found_host_vars)
|
|
||||||
# get host vars from host_vars/ files
|
|
||||||
for host in self.get_hosts():
|
|
||||||
self.get_host_vars(host)
|
|
||||||
# invalidate cache
|
|
||||||
self._vars_per_host = {}
|
|
||||||
self._vars_per_group = {}
|
|
||||||
|
|
||||||
def get_host_vars(self, host, new_pb_basedir=False, return_results=False):
|
|
||||||
""" Read host_vars/ files """
|
|
||||||
return self._get_hostgroup_vars(host=host, group=None, new_pb_basedir=new_pb_basedir, return_results=return_results)
|
|
||||||
|
|
||||||
def get_group_vars(self, group, new_pb_basedir=False, return_results=False):
|
|
||||||
""" Read group_vars/ files """
|
|
||||||
return self._get_hostgroup_vars(host=None, group=group, new_pb_basedir=new_pb_basedir, return_results=return_results)
|
|
||||||
|
|
||||||
def _find_group_vars_files(self, basedir):
|
|
||||||
""" Find group_vars/ files """
|
|
||||||
if basedir in ('', None):
|
|
||||||
basedir = './'
|
|
||||||
path = os.path.realpath(os.path.join(basedir, 'group_vars'))
|
|
||||||
found_vars = set()
|
|
||||||
if os.path.exists(path):
|
|
||||||
if os.path.isdir(path):
|
|
||||||
found_vars = set(os.listdir(to_text(path)))
|
|
||||||
else:
|
|
||||||
display.warning("Found group_vars that is not a directory, skipping: %s" % path)
|
|
||||||
return found_vars
|
|
||||||
|
|
||||||
def _find_host_vars_files(self, basedir):
|
|
||||||
""" Find host_vars/ files """
|
|
||||||
if basedir in ('', None):
|
|
||||||
basedir = './'
|
|
||||||
path = os.path.realpath(os.path.join(basedir, 'host_vars'))
|
|
||||||
found_vars = set()
|
|
||||||
if os.path.exists(path):
|
|
||||||
found_vars = set(os.listdir(to_text(path)))
|
|
||||||
return found_vars
|
|
||||||
|
|
||||||
def _get_hostgroup_vars(self, host=None, group=None, new_pb_basedir=False, return_results=False):
|
|
||||||
"""
|
|
||||||
Loads variables from group_vars/<groupname> and host_vars/<hostname> in directories parallel
|
|
||||||
to the inventory base directory or in the same directory as the playbook. Variables in the playbook
|
|
||||||
dir will win over the inventory dir if files are in both.
|
|
||||||
"""
|
|
||||||
|
|
||||||
results = {}
|
|
||||||
scan_pass = 0
|
|
||||||
_basedir = self._basedir
|
|
||||||
_playbook_basedir = self._playbook_basedir
|
|
||||||
|
|
||||||
# look in both the inventory base directory and the playbook base directory
|
|
||||||
# unless we do an update for a new playbook base dir
|
|
||||||
if not new_pb_basedir and _playbook_basedir:
|
|
||||||
basedirs = [_basedir, _playbook_basedir]
|
|
||||||
else:
|
|
||||||
basedirs = [_basedir]
|
|
||||||
|
|
||||||
for basedir in basedirs:
|
|
||||||
# this can happen from particular API usages, particularly if not run
|
|
||||||
# from /usr/bin/ansible-playbook
|
|
||||||
if basedir in ('', None):
|
|
||||||
basedir = './'
|
|
||||||
|
|
||||||
scan_pass = scan_pass + 1
|
|
||||||
|
|
||||||
# it's not an error if the directory does not exist, keep moving
|
|
||||||
if not os.path.exists(basedir):
|
|
||||||
continue
|
|
||||||
|
|
||||||
# save work of second scan if the directories are the same
|
|
||||||
if _basedir == _playbook_basedir and scan_pass != 1:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Before trying to load vars from file, check that the directory contains relevant file names
|
|
||||||
if host is None and any(map(lambda ext: group.name + ext in self._group_vars_files, C.YAML_FILENAME_EXTENSIONS)):
|
|
||||||
# load vars in dir/group_vars/name_of_group
|
|
||||||
base_path = to_text(os.path.abspath(os.path.join(to_bytes(basedir), b"group_vars/" + to_bytes(group.name))), errors='surrogate_or_strict')
|
|
||||||
host_results = self._variable_manager.add_group_vars_file(base_path, self._loader)
|
|
||||||
if return_results:
|
|
||||||
results = combine_vars(results, host_results)
|
|
||||||
elif group is None and any(map(lambda ext: host.name + ext in self._host_vars_files, C.YAML_FILENAME_EXTENSIONS)):
|
|
||||||
# same for hostvars in dir/host_vars/name_of_host
|
|
||||||
base_path = to_text(os.path.abspath(os.path.join(to_bytes(basedir), b"host_vars/" + to_bytes(host.name))), errors='surrogate_or_strict')
|
|
||||||
group_results = self._variable_manager.add_host_vars_file(base_path, self._loader)
|
|
||||||
if return_results:
|
|
||||||
results = combine_vars(results, group_results)
|
|
||||||
|
|
||||||
# all done, results is a dictionary of variables for this particular host.
|
|
||||||
return results
|
|
||||||
|
|
||||||
def refresh_inventory(self):
|
|
||||||
|
|
||||||
self.clear_pattern_cache()
|
|
||||||
self.clear_group_dict_cache()
|
|
||||||
|
|
||||||
self._hosts_cache = {}
|
|
||||||
self._vars_per_host = {}
|
|
||||||
self._vars_per_group = {}
|
|
||||||
self.groups = {}
|
|
||||||
|
|
||||||
self.parse_inventory(self.host_list)
|
|
@@ -0,0 +1,281 @@
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||||
|
#
|
||||||
|
# This file is part of Ansible
|
||||||
|
#
|
||||||
|
# Ansible is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# Ansible is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
#############################################
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import re
|
||||||
|
|
||||||
|
from ansible import constants as C
|
||||||
|
from ansible.errors import AnsibleError
|
||||||
|
from ansible.inventory.group import Group
|
||||||
|
from ansible.inventory.host import Host
|
||||||
|
from ansible.module_utils.six import iteritems
|
||||||
|
from ansible.module_utils._text import to_bytes
|
||||||
|
from ansible.plugins.cache import FactCache
|
||||||
|
from ansible.utils.vars import combine_vars
|
||||||
|
from ansible.utils.path import basedir
|
||||||
|
|
||||||
|
try:
|
||||||
|
from __main__ import display
|
||||||
|
except ImportError:
|
||||||
|
from ansible.utils.display import Display
|
||||||
|
display = Display()
|
||||||
|
|
||||||
|
class InventoryData(object):
|
||||||
|
"""
|
||||||
|
Holds inventory data (host and group objects).
|
||||||
|
Using its methods should guarantee expected relationships and data.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
|
||||||
|
# the inventory object holds a list of groups
|
||||||
|
self.groups = {}
|
||||||
|
self.hosts = {}
|
||||||
|
|
||||||
|
# provides 'groups' magic var, host object has group_names
|
||||||
|
self._groups_dict_cache = {}
|
||||||
|
|
||||||
|
# current localhost, implicit or explicit
|
||||||
|
self.localhost = None
|
||||||
|
|
||||||
|
self.current_source = None
|
||||||
|
|
||||||
|
# Always create the 'all' and 'ungrouped' groups,
|
||||||
|
for group in ('all', 'ungrouped'):
|
||||||
|
self.add_group(group)
|
||||||
|
self.add_child('all', 'ungrouped')
|
||||||
|
|
||||||
|
# prime cache
|
||||||
|
self.cache = FactCache()
|
||||||
|
|
||||||
|
def serialize(self):
|
||||||
|
data = dict()
|
||||||
|
return data
|
||||||
|
|
||||||
|
def deserialize(self, data):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def _create_implicit_localhost(self, pattern):
|
||||||
|
|
||||||
|
if self.localhost:
|
||||||
|
new_host = self.localhost
|
||||||
|
else:
|
||||||
|
new_host = Host(pattern)
|
||||||
|
|
||||||
|
# use 'all' group vars, even though this host is not a member of the 'all' group
|
||||||
|
new_host.vars = self.groups['all'].get_vars()
|
||||||
|
|
||||||
|
new_host.address = "127.0.0.1"
|
||||||
|
new_host.implicit = True
|
||||||
|
|
||||||
|
if "ansible_python_interpreter" not in new_host.vars:
|
||||||
|
py_interp = sys.executable
|
||||||
|
if not py_interp:
|
||||||
|
# sys.executable is not set in some cornercases. #13585
|
||||||
|
py_interp = '/usr/bin/python'
|
||||||
|
display.warning('Unable to determine python interpreter from sys.executable. Using /usr/bin/python default.'
|
||||||
|
' You can correct this by setting ansible_python_interpreter for localhost')
|
||||||
|
new_host.set_variable("ansible_python_interpreter", py_interp)
|
||||||
|
|
||||||
|
if "ansible_connection" not in new_host.vars:
|
||||||
|
new_host.set_variable("ansible_connection", 'local')
|
||||||
|
|
||||||
|
self.localhost = new_host
|
||||||
|
|
||||||
|
return new_host
|
||||||
|
|
||||||
|
|
||||||
|
def _scan_groups_for_host(self, hostname, localhost=False):
|
||||||
|
''' in case something did not update inventory correctly, fall back to a group scan '''
|
||||||
|
|
||||||
|
found = None
|
||||||
|
for group in self.groups.values():
|
||||||
|
for host in group.get_hosts():
|
||||||
|
if hostname == host.name:
|
||||||
|
found = host
|
||||||
|
break
|
||||||
|
if found:
|
||||||
|
break
|
||||||
|
|
||||||
|
if found:
|
||||||
|
display.debug('Found host (%s) in groups but it was missing from main inventory' % hostname)
|
||||||
|
|
||||||
|
return found
|
||||||
|
|
||||||
|
|
||||||
|
def reconcile_inventory(self):
|
||||||
|
''' Ensure inventory basic rules, run after updates '''
|
||||||
|
|
||||||
|
display.debug('Reconcile groups and hosts in inventory.')
|
||||||
|
self.current_source = None
|
||||||
|
|
||||||
|
group_names = set()
|
||||||
|
# set group vars from group_vars/ files and vars plugins
|
||||||
|
for g in self.groups:
|
||||||
|
group = self.groups[g]
|
||||||
|
group_names.add(group.name)
|
||||||
|
|
||||||
|
host_names = set()
|
||||||
|
# get host vars from host_vars/ files and vars plugins
|
||||||
|
for host in self.hosts.values():
|
||||||
|
host_names.add(host.name)
|
||||||
|
|
||||||
|
mygroups = host.get_groups()
|
||||||
|
|
||||||
|
# ensure hosts are always in 'all'
|
||||||
|
if 'all' not in mygroups and not host.implicit:
|
||||||
|
self.add_child('all', host.name)
|
||||||
|
|
||||||
|
if self.groups['ungrouped'] in mygroups:
|
||||||
|
# clear ungrouped of any hosts incorrectly stored there by the parser
|
||||||
|
if set(mygroups).difference(set([self.groups['all'], self.groups['ungrouped']])):
|
||||||
|
host.remove_group(self.groups['ungrouped'])
|
||||||
|
|
||||||
|
elif not host.implicit:
|
||||||
|
# add ungrouped hosts to ungrouped, except implicit
|
||||||
|
length = len(mygroups)
|
||||||
|
if length == 0 or (length == 1 and self.groups['all'] in mygroups):
|
||||||
|
self.add_child('ungrouped', host.name)
|
||||||
|
|
||||||
|
# special case for implicit hosts
|
||||||
|
if host.implicit:
|
||||||
|
host.vars = combine_vars(self.groups['all'].get_vars(), host.vars)
|
||||||
|
|
||||||
|
# warn if overloading identifier as both group and host
|
||||||
|
for conflict in group_names.intersection(host_names):
|
||||||
|
display.warning("Found both group and host with same name: %s" % conflict)
|
||||||
|
|
||||||
|
self._groups_dict_cache = {}
|
||||||
|
|
||||||
|
def get_host(self, hostname):
|
||||||
|
''' fetch host object using name
|
||||||
|
deal with implicit localhost
|
||||||
|
and possible inconsistent inventory '''
|
||||||
|
|
||||||
|
matching_host = self.hosts.get(hostname, None)
|
||||||
|
|
||||||
|
# if host is not in hosts dict
|
||||||
|
if matching_host is None:
|
||||||
|
|
||||||
|
# might need to create implicit localhost
|
||||||
|
if hostname in C.LOCALHOST:
|
||||||
|
matching_host = self._create_implicit_localhost(hostname)
|
||||||
|
|
||||||
|
# might be inconsistent inventory, search groups
|
||||||
|
if matching_host is None:
|
||||||
|
matching_host = self._scan_groups_for_host(hostname)
|
||||||
|
|
||||||
|
# if found/created update hosts dict
|
||||||
|
if matching_host:
|
||||||
|
self.hosts[hostname] = matching_host
|
||||||
|
|
||||||
|
return matching_host
|
||||||
|
|
||||||
|
|
||||||
|
def add_group(self, group):
|
||||||
|
''' adds a group to inventory if not there already '''
|
||||||
|
|
||||||
|
if group not in self.groups:
|
||||||
|
g = Group(group)
|
||||||
|
self.groups[group] = g
|
||||||
|
self._groups_dict_cache = {}
|
||||||
|
display.debug("Added group %s to inventory" % group)
|
||||||
|
else:
|
||||||
|
display.debug("group %s already in inventory" % group)
|
||||||
|
|
||||||
|
def add_host(self, host, group=None, port=None):
|
||||||
|
''' adds a host to inventory and possibly a group if not there already '''
|
||||||
|
|
||||||
|
g = None
|
||||||
|
if group:
|
||||||
|
if group in self.groups:
|
||||||
|
g = self.groups[group]
|
||||||
|
else:
|
||||||
|
raise AnsibleError("Could not find group %s in inventory" % group)
|
||||||
|
|
||||||
|
if host not in self.hosts:
|
||||||
|
h = Host(host, port)
|
||||||
|
self.hosts[host] = h
|
||||||
|
if self.current_source: # set to 'first source' in which host was encountered
|
||||||
|
self.set_variable(host, 'inventory_file', os.path.basename(self.current_source))
|
||||||
|
self.set_variable(host, 'inventory_dir', basedir(self.current_source))
|
||||||
|
else:
|
||||||
|
self.set_variable(host, 'inventory_file', None)
|
||||||
|
self.set_variable(host, 'inventory_dir', None)
|
||||||
|
display.debug("Added host %s to inventory" % (host))
|
||||||
|
|
||||||
|
# set default localhost from inventory to avoid creating an implicit one. Last localhost defined 'wins'.
|
||||||
|
if host in C.LOCALHOST:
|
||||||
|
if self.localhost is None:
|
||||||
|
self.localhost = self.hosts[host]
|
||||||
|
display.vvvv("Set default localhost to %s" % h)
|
||||||
|
else:
|
||||||
|
display.warning("A duplicate localhost-like entry was found (%s). First found localhost was %s" % (h, self.localhost.name))
|
||||||
|
else:
|
||||||
|
h = self.hosts[host]
|
||||||
|
|
||||||
|
if g and host not in g.get_hosts():
|
||||||
|
g.add_host(h)
|
||||||
|
self._groups_dict_cache = {}
|
||||||
|
display.debug("Added host %s to group %s" % (host,group))
|
||||||
|
|
||||||
|
|
||||||
|
def set_variable(self, entity, varname, value):
|
||||||
|
''' sets a variable for an inventory object '''
|
||||||
|
|
||||||
|
if entity in self.groups:
|
||||||
|
inv_object = self.groups[entity]
|
||||||
|
elif entity in self.hosts:
|
||||||
|
inv_object = self.hosts[entity]
|
||||||
|
else:
|
||||||
|
raise AnsibleError("Could not identify group or host named %s" % entity)
|
||||||
|
|
||||||
|
inv_object.set_variable(varname, value)
|
||||||
|
display.debug('set %s for %s' % (varname, entity))
|
||||||
|
|
||||||
|
|
||||||
|
def add_child(self, group, child):
|
||||||
|
''' Add host or group to group '''
|
||||||
|
|
||||||
|
if group in self.groups:
|
||||||
|
g = self.groups[group]
|
||||||
|
if child in self.groups:
|
||||||
|
g.add_child_group(self.groups[child])
|
||||||
|
elif child in self.hosts:
|
||||||
|
g.add_host(self.hosts[child])
|
||||||
|
else:
|
||||||
|
raise AnsibleError("%s is not a known host nor group" % child)
|
||||||
|
self._groups_dict_cache = {}
|
||||||
|
display.debug('Group %s now contains %s' % (group, child))
|
||||||
|
else:
|
||||||
|
raise AnsibleError("%s is not a known group" % group)
|
||||||
|
|
||||||
|
def get_groups_dict(self):
|
||||||
|
"""
|
||||||
|
We merge a 'magic' var 'groups' with group name keys and hostname list values into every host variable set. Cache for speed.
|
||||||
|
"""
|
||||||
|
if not self._groups_dict_cache:
|
||||||
|
for (group_name, group) in iteritems(self.groups):
|
||||||
|
self._groups_dict_cache[group_name] = [h.name for h in group.get_hosts()]
|
||||||
|
|
||||||
|
return self._groups_dict_cache
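
# Usage sketch (illustration only, not part of this commit): exercising the
# InventoryData API defined above. Host/group names are made up; the class
# already provides 'all' and 'ungrouped' on construction.
if __name__ == '__main__':

    inv = InventoryData()

    inv.add_group('dbservers')
    inv.add_child('all', 'dbservers')     # attach the new group under 'all'
    inv.add_host('db1.example.com', group='dbservers', port=5432)
    inv.set_variable('dbservers', 'ntp_server', 'time.example.com')
    inv.set_variable('db1.example.com', 'backup', True)

    inv.reconcile_inventory()             # enforce 'all'/'ungrouped' membership rules

    print(inv.get_groups_dict())          # {'all': [...], 'ungrouped': [...], 'dbservers': ['db1.example.com']}
    print(inv.get_host('localhost'))      # implicit localhost is created on demand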
|
||||||
|
|
@@ -1,299 +0,0 @@
# (c) 2013, Daniel Hokka Zakrisson <daniel@hozac.com>
|
|
||||||
# (c) 2014, Serge van Ginderachter <serge@vanginderachter.be>
|
|
||||||
#
|
|
||||||
# This file is part of Ansible
|
|
||||||
#
|
|
||||||
# Ansible is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
#
|
|
||||||
# Ansible is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
#############################################
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
from ansible import constants as C
|
|
||||||
from ansible.errors import AnsibleError
|
|
||||||
from ansible.utils.vars import combine_vars
|
|
||||||
from ansible.module_utils._text import to_native
|
|
||||||
|
|
||||||
#FIXME: make into plugins
|
|
||||||
from ansible.inventory.ini import InventoryParser as InventoryINIParser
|
|
||||||
from ansible.inventory.yaml import InventoryParser as InventoryYAMLParser
|
|
||||||
from ansible.inventory.script import InventoryScript
|
|
||||||
|
|
||||||
__all__ = ['get_file_parser']
|
|
||||||
|
|
||||||
def get_file_parser(hostsfile, groups, loader):
|
|
||||||
# check to see if the specified file starts with a
|
|
||||||
# shebang (#!/), so if an error is raised by the parser
|
|
||||||
# class we can show a more apropos error
|
|
||||||
|
|
||||||
shebang_present = False
|
|
||||||
processed = False
|
|
||||||
myerr = []
|
|
||||||
parser = None
|
|
||||||
|
|
||||||
try:
|
|
||||||
with open(hostsfile, 'rb') as inv_file:
|
|
||||||
initial_chars = inv_file.read(2)
|
|
||||||
if initial_chars.startswith(b'#!'):
|
|
||||||
shebang_present = True
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
|
|
||||||
#FIXME: make this 'plugin loop'
|
|
||||||
# script
|
|
||||||
if loader.is_executable(hostsfile):
|
|
||||||
try:
|
|
||||||
parser = InventoryScript(loader=loader, groups=groups, filename=hostsfile)
|
|
||||||
processed = True
|
|
||||||
except Exception as e:
|
|
||||||
myerr.append('Attempted to execute "%s" as inventory script: %s' % (hostsfile, to_native(e)))
|
|
||||||
elif shebang_present:
|
|
||||||
|
|
||||||
myerr.append("The inventory file \'%s\' looks like it should be an executable inventory script, but is not marked executable. "
|
|
||||||
"Perhaps you want to correct this with `chmod +x %s`?" % (hostsfile, hostsfile))
|
|
||||||
|
|
||||||
# YAML/JSON
|
|
||||||
if not processed and not shebang_present and os.path.splitext(hostsfile)[-1] in C.YAML_FILENAME_EXTENSIONS:
|
|
||||||
try:
|
|
||||||
parser = InventoryYAMLParser(loader=loader, groups=groups, filename=hostsfile)
|
|
||||||
processed = True
|
|
||||||
except Exception as e:
|
|
||||||
myerr.append('Attempted to read "%s" as YAML: %s' % (to_native(hostsfile), to_native(e)))
|
|
||||||
|
|
||||||
# ini
|
|
||||||
if not processed and not shebang_present:
|
|
||||||
try:
|
|
||||||
parser = InventoryINIParser(loader=loader, groups=groups, filename=hostsfile)
|
|
||||||
processed = True
|
|
||||||
except Exception as e:
|
|
||||||
myerr.append('Attempted to read "%s" as ini file: %s ' % (to_native(hostsfile), to_native(e)))
|
|
||||||
|
|
||||||
if not processed and myerr:
|
|
||||||
raise AnsibleError('\n'.join(myerr))
|
|
||||||
|
|
||||||
return parser


class InventoryDirectory(object):
    ''' Host inventory parser for ansible using a directory of inventories. '''

    def __init__(self, loader, groups=None, filename=C.DEFAULT_HOST_LIST):
        if groups is None:
            groups = dict()

        self.names = os.listdir(filename)
        self.names.sort()
        self.directory = filename
        self.parsers = []
        self.hosts = {}
        self.groups = groups

        self._loader = loader

        for i in self.names:

            # Skip files that end with certain extensions or characters
            if any(i.endswith(ext) for ext in C.DEFAULT_INVENTORY_IGNORE):
                continue
            # Skip hidden files
            if i.startswith('.') and not i.startswith('./'):
                continue
            # These are things inside of an inventory basedir
            if i in ("host_vars", "group_vars", "vars_plugins"):
                continue
            fullpath = os.path.join(self.directory, i)
            if os.path.isdir(fullpath):
                parser = InventoryDirectory(loader=loader, groups=groups, filename=fullpath)
            else:
                parser = get_file_parser(fullpath, self.groups, loader)
                if parser is None:
                    # FIXME: needs to use display
                    import warnings
                    warnings.warn("Could not find parser for %s, skipping" % fullpath)
                    continue

            self.parsers.append(parser)

            # retrieve all groups and hosts from the parser and add them to
            # self, don't look at group lists yet, to avoid
            # recursion trouble, but just make sure all objects exist in self
            newgroups = parser.groups.values()
            for group in newgroups:
                for host in group.hosts:
                    self._add_host(host)
            for group in newgroups:
                self._add_group(group)

            # now check the objects lists so they contain only objects from
            # self; membership data in groups is already fine (except all &
            # ungrouped, see later), but might still reference objects not in self
            for group in self.groups.values():
                # iterate on a copy of the lists, as those lists get changed in
                # the loop
                # list with group's child group objects:
                for child in group.child_groups[:]:
                    if child != self.groups[child.name]:
                        group.child_groups.remove(child)
                        group.child_groups.append(self.groups[child.name])
                # list with group's parent group objects:
                for parent in group.parent_groups[:]:
                    if parent != self.groups[parent.name]:
                        group.parent_groups.remove(parent)
                        group.parent_groups.append(self.groups[parent.name])
                # list with group's host objects:
                for host in group.hosts[:]:
                    if host != self.hosts[host.name]:
                        group.hosts.remove(host)
                        group.hosts.append(self.hosts[host.name])
                    # also check here that the group that contains host, is
                    # also contained in the host's group list
                    if group not in self.hosts[host.name].groups:
                        self.hosts[host.name].groups.append(group)

        # extra checks on special groups all and ungrouped
        # remove hosts from 'ungrouped' if they became member of other groups
        if 'ungrouped' in self.groups:
            ungrouped = self.groups['ungrouped']
            # loop on a copy of ungrouped hosts, as we want to change that list
            for host in frozenset(ungrouped.hosts):
                if len(host.groups) > 1:
                    host.groups.remove(ungrouped)
                    ungrouped.hosts.remove(host)

        # remove hosts from 'all' if they became member of other groups
        # all should only contain direct children, not grandchildren
        # direct children should have depth == 1
        if 'all' in self.groups:
            allgroup = self.groups['all']
            # loop on a copy of all's child groups, as we want to change that list
            for group in allgroup.child_groups[:]:
                # groups might once have been added to all, and later be added
                # to another group: we need to remove the link with all then
                if len(group.parent_groups) > 1 and allgroup in group.parent_groups:
                    # real children of all have just 1 parent, all
                    # this one has more, so not a direct child of all anymore
                    group.parent_groups.remove(allgroup)
                    allgroup.child_groups.remove(group)
                elif allgroup not in group.parent_groups:
                    # this group was once added to all, but doesn't list it as
                    # a parent any more; the info in the group is the correct
                    # info
                    allgroup.child_groups.remove(group)

    def _add_group(self, group):
        """ Merge an existing group or add a new one;
            Track parent and child groups, and hosts of the new one """

        if group.name not in self.groups:
            # it's brand new, add him!
            self.groups[group.name] = group
        # the Group class does not (yet) implement __eq__/__ne__,
        # so unlike Host we do a regular comparison here
        if self.groups[group.name] != group:
            # different object, merge
            self._merge_groups(self.groups[group.name], group)

    def _add_host(self, host):
        if host.name not in self.hosts:
            # Papa's got a brand new host
            self.hosts[host.name] = host
        # because the __eq__/__ne__ methods in Host() compare the
        # name fields rather than references, we use id() here to
        # do the object comparison for merges
        if self.hosts[host.name] != host:
            # different object, merge
            self._merge_hosts(self.hosts[host.name], host)

    def _merge_groups(self, group, newgroup):
        """ Merge all of instance newgroup into group,
            update parent/child relationships
            group lists may still contain group objects that exist in self with
            same name, but were instantiated as different objects in some other
            inventory parser; these are handled later """

        # name
        if group.name != newgroup.name:
            raise AnsibleError("Cannot merge inventory group %s with %s" % (group.name, newgroup.name))

        # depth
        group.depth = max([group.depth, newgroup.depth])

        # hosts list (host objects are by now already added to self.hosts)
        for host in newgroup.hosts:
            grouphosts = dict([(h.name, h) for h in group.hosts])
            if host.name in grouphosts:
                # same host name but different object, merge
                self._merge_hosts(grouphosts[host.name], host)
            else:
                # new membership, add host to group from self
                # group from self will also be added again to host.groups, but
                # as different object
                group.add_host(self.hosts[host.name])
                # now remove the old object for this group from host.groups
                for hostgroup in [g for g in host.groups]:
                    if hostgroup.name == group.name and hostgroup != self.groups[group.name]:
                        self.hosts[host.name].groups.remove(hostgroup)

        # group child membership relation
        for newchild in newgroup.child_groups:
            # dict with existing child groups:
            childgroups = dict([(g.name, g) for g in group.child_groups])
            # check if child of new group is already known as a child
            if newchild.name not in childgroups:
                self.groups[group.name].add_child_group(newchild)

        # group parent membership relation
        for newparent in newgroup.parent_groups:
            # dict with existing parent groups:
            parentgroups = dict([(g.name, g) for g in group.parent_groups])
            # check if parent of new group is already known as a parent
            if newparent.name not in parentgroups:
                if newparent.name not in self.groups:
                    # group does not exist yet in self, import him
                    self.groups[newparent.name] = newparent
                # group now exists but not yet as a parent here
                self.groups[newparent.name].add_child_group(group)

        # variables
        group.vars = combine_vars(group.vars, newgroup.vars)

    def _merge_hosts(self, host, newhost):
        """ Merge all of instance newhost into host """

        # name
        if host.name != newhost.name:
            raise AnsibleError("Cannot merge host %s with %s" % (host.name, newhost.name))

        # group membership relation
        for newgroup in newhost.groups:
            # dict with existing groups:
            hostgroups = dict([(g.name, g) for g in host.groups])
            # check if new group is already known as a group
            if newgroup.name not in hostgroups:
                if newgroup.name not in self.groups:
                    # group does not exist yet in self, import him
                    self.groups[newgroup.name] = newgroup
                # group now exists but doesn't have host yet
                self.groups[newgroup.name].add_host(host)

        # variables
        host.vars = combine_vars(host.vars, newhost.vars)

    def get_host_variables(self, host):
        """ Gets additional host variables from all inventories """
        vars = {}
        for i in self.parsers:
            vars.update(i.get_host_variables(host))
        return vars
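
# Illustrative sketch (not part of the original file): InventoryDirectory was
# meant for layouts such as the one below, where each entry gets its own parser
# and the results are merged, while group_vars/host_vars/vars_plugins and
# hidden or ignored files are skipped:
#
#   inventory/
#   ├── group_vars/     # skipped here, handled by vars loading
#   ├── host_vars/      # skipped here, handled by vars loading
#   ├── static_hosts    # plain file -> get_file_parser()
#   ├── cloud.py        # executable script -> InventoryScript
#   └── staging/        # subdirectory -> nested InventoryDirectory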

@ -0,0 +1,35 @@
# (c) 2017, Ansible by RedHat Inc,
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

#############################################
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.utils.vars import combine_vars


def sort_groups(groups):
    return sorted(groups, key=lambda g: (g.depth, g.priority, g.name))


def get_group_vars(groups):

    results = {}
    for group in sort_groups(groups):
        results = combine_vars(results, group.get_vars())

    return results
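
# Illustrative example (not part of the original commit), assuming the default
# 'replace' merge behaviour of combine_vars(): because sort_groups() orders
# groups by (depth, priority, name) and later items win, a child group's
# variables override its parents'. Roughly:
#
#   all (depth 0):        ntp_server: time.example.com
#   webservers (depth 1): ntp_server: time.web.example.com
#
#   get_group_vars([all, webservers])
#   # -> {'ntp_server': 'time.web.example.com'}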

@ -0,0 +1,599 @@
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

#############################################
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import fnmatch
import os
import re
import itertools

from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError
from ansible.inventory.data import InventoryData
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_bytes, to_text
from ansible.parsing.utils.addresses import parse_address
from ansible.plugins import PluginLoader
from ansible.utils.path import unfrackpath

try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()

HOSTS_PATTERNS_CACHE = {}

IGNORED_ALWAYS = [b"^\.", b"^host_vars$", b"^group_vars$", b"^vars_plugins$"]
IGNORED_PATTERNS = [to_bytes(x) for x in C.INVENTORY_IGNORE_PATTERNS]
IGNORED_EXTS = [b'%s$' % to_bytes(re.escape(x)) for x in C.INVENTORY_IGNORE_EXTS]

IGNORED = re.compile(b'|'.join(IGNORED_ALWAYS + IGNORED_PATTERNS + IGNORED_EXTS))


def order_patterns(patterns):
    ''' takes a list of patterns and reorders them by modifier to apply them consistently '''

    # FIXME: this goes away if we apply patterns incrementally or by groups
    pattern_regular = []
    pattern_intersection = []
    pattern_exclude = []
    for p in patterns:
        if p.startswith("!"):
            pattern_exclude.append(p)
        elif p.startswith("&"):
            pattern_intersection.append(p)
        elif p:
            pattern_regular.append(p)

    # if no regular pattern was given, hence only exclude and/or intersection
    # make that magically work
    if pattern_regular == []:
        pattern_regular = ['all']

    # when applying the host selectors, run those without the "&" or "!"
    # first, then the &s, then the !s.
    return pattern_regular + pattern_intersection + pattern_exclude
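
# Illustrative examples (not part of the original commit): exclusions and
# intersections are pushed behind the plain selections so they are applied
# last, and a bare exclusion/intersection is implicitly run against 'all':
#
#   order_patterns(['!db', '&staging', 'webservers'])
#   # -> ['webservers', '&staging', '!db']
#
#   order_patterns(['!db'])
#   # -> ['all', '!db']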


def split_host_pattern(pattern):
    """
    Takes a string containing host patterns separated by commas (or a list
    thereof) and returns a list of single patterns (which may not contain
    commas). Whitespace is ignored.

    Also accepts ':' as a separator for backwards compatibility, but it is
    not recommended due to the conflict with IPv6 addresses and host ranges.

    Example: 'a,b[1], c[2:3] , d' -> ['a', 'b[1]', 'c[2:3]', 'd']
    """

    if isinstance(pattern, list):
        return list(itertools.chain(*map(split_host_pattern, pattern)))

    # If it's got commas in it, we'll treat it as a straightforward
    # comma-separated list of patterns.

    elif ',' in pattern:
        patterns = re.split('\s*,\s*', pattern)

    # If it doesn't, it could still be a single pattern. This accounts for
    # non-separator uses of colons: IPv6 addresses and [x:y] host ranges.
    else:
        try:
            (base, port) = parse_address(pattern, allow_ranges=True)
            patterns = [pattern]
        except:
            # The only other case we accept is a ':'-separated list of patterns.
            # This mishandles IPv6 addresses, and is retained only for backwards
            # compatibility.
            patterns = re.findall(
                r'''(?:             # We want to match something comprising:
                        [^\s:\[\]]  # (anything other than whitespace or ':[]'
                        |           # ...or...
                        \[[^\]]*\]  # a single complete bracketed expression)
                    )+              # occurring once or more
                ''', pattern, re.X
            )

    return [p.strip() for p in patterns]


class InventoryManager(object):
    ''' Creates and manages inventory '''

    def __init__(self, loader, sources=None):

        # base objects
        self._loader = loader
        self._inventory = InventoryData()

        # a list of host(names) to contain current inquiries to
        self._restriction = None
        self._subset = None

        # caches
        self._pattern_cache = {}       # resolved host patterns
        self._inventory_plugins = {}   # for generating inventory

        # the inventory dirs, files, script paths or lists of hosts
        if sources is None:
            self._sources = []
        elif isinstance(sources, string_types):
            self._sources = [sources]
        else:
            self._sources = sources

        # get to work!
        self.parse_sources()

    @property
    def localhost(self):
        return self._inventory.localhost

    @property
    def groups(self):
        return self._inventory.groups

    @property
    def hosts(self):
        return self._inventory.hosts

    def get_vars(self, *args, **kwargs):
        return self._inventory.get_vars(args, kwargs)

    def add_host(self, host, group=None, port=None):
        return self._inventory.add_host(host, group, port)

    def add_group(self, group):
        return self._inventory.add_group(group)

    def get_groups_dict(self):
        return self._inventory.get_groups_dict()

    def reconcile_inventory(self):
        return self._inventory.reconcile_inventory()

    def get_host(self, hostname):
        return self._inventory.get_host(hostname)
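
    # Illustrative usage sketch (not part of the original file); DataLoader
    # comes from ansible.parsing.dataloader, and the paths/names are made up:
    #
    #   loader = DataLoader()
    #   im = InventoryManager(loader=loader, sources=['/etc/ansible/hosts'])
    #   im.get_hosts('webservers')        # -> list of matching Host objects
    #   im.get_host('web01.example.com')  # -> a single Host (or None)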

    def _setup_inventory_plugins(self):
        ''' sets up loaded inventory plugins for usage '''

        inventory_loader = PluginLoader('InventoryModule', 'ansible.plugins.inventory', 'inventory_plugins', 'inventory_plugins')
        display.vvvv('setting up inventory plugins')

        for name in C.INVENTORY_ENABLED:
            plugin = inventory_loader.get(name)
            name = os.path.splitext(os.path.basename(plugin._original_path))[0]
            self._inventory_plugins[name] = plugin

        if not self._inventory_plugins:
            raise AnsibleError("No inventory plugins available to generate inventory, make sure you have at least one whitelisted.")

    def parse_sources(self, cache=True):
        ''' iterate over inventory sources and parse each one to populate it '''

        self._setup_inventory_plugins()

        parsed = False
        # allow for multiple inventory parsing
        for source in self._sources:

            if source:
                if ',' not in source:
                    source = unfrackpath(source, follow=False)
                parse = self.parse_source(source, cache=cache)
                if parse and not parsed:
                    parsed = True

        if parsed:
            # do post processing
            self._inventory.reconcile_inventory()
        else:
            display.warning("No inventory was parsed, only implicit localhost is available")

        self._inventory_plugins = {}

    def parse_source(self, source, cache=True):
        ''' Generate or update inventory for the source provided '''

        parsed = False
        display.debug(u'Examining possible inventory source: %s' % source)

        b_source = to_bytes(source)
        # process directories as a collection of inventories
        if os.path.isdir(b_source):
            display.debug(u'Searching for inventory files in directory: %s' % source)
            for i in sorted(os.listdir(b_source)):

                display.debug(u'Considering %s' % i)
                # Skip hidden files and stuff we explicitly ignore
                if IGNORED.search(i):
                    continue

                # recursively deal with directory entries
                fullpath = os.path.join(b_source, i)
                parsed_this_one = self.parse_source(to_text(fullpath))
                display.debug(u'parsed %s as %s' % (fullpath, parsed_this_one))
                if not parsed:
                    parsed = parsed_this_one
        else:
            # left with strings or files, let plugins figure it out

            # set so new hosts can use it for the inventory_file/inventory_dir vars
            self._inventory.current_source = source

            # get inventory plugins if needed, there should always be at least one generator
            if not self._inventory_plugins:
                self._setup_inventory_plugins()

            # try source with each plugin
            failures = []
            for plugin in self._inventory_plugins:
                display.debug(u'Attempting to use plugin %s' % plugin)

                # initialize
                inv = self._inventory_plugins[plugin]
                if inv.verify_file(source):
                    try:
                        inv.parse(self._inventory, self._loader, source, cache=cache)
                        parsed = True
                        display.vvv(u'Parsed %s inventory source with %s plugin' % (to_text(source), plugin))
                        break
                    except AnsibleParserError as e:
                        failures.append(u'\n* Failed to parse %s with %s inventory plugin: %s\n' % (to_text(source), plugin, to_text(e)))
                else:
                    display.debug(u'%s did not meet %s requirements' % (to_text(source), plugin))
            else:
                if failures:
                    # only if no plugin processed files should we show errors.
                    for fail in failures:
                        display.warning(fail)

        if not parsed:
            display.warning(u"Unable to parse %s as an inventory source" % to_text(source))

        # clean up, just in case
        self._inventory.current_source = None

        return parsed
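
    # Illustrative note (not part of the original file): for a non-directory
    # source, each whitelisted plugin is tried in the configured order (for
    # example ['host_list', 'script', 'yaml', 'ini']); the first one whose
    # verify_file() accepts the source and whose parse() succeeds wins, e.g.
    #
    #   'web1,web2,'          -> typically handled by the host_list plugin
    #   '/path/to/hosts.yml'  -> typically handled by the yaml plugin
    #   '/path/to/ec2.py'     -> typically handled by the script plugin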

    def clear_caches(self):
        ''' clear all caches '''
        global HOSTS_PATTERNS_CACHE
        HOSTS_PATTERNS_CACHE = {}
        self._pattern_cache = {}
        # FIXME: flush inventory cache

    def refresh_inventory(self):
        ''' recalculate inventory '''

        self.clear_caches()
        self._inventory = InventoryData()
        self.parse_sources(cache=False)

    def _match(self, string, pattern_str):
        try:
            if pattern_str.startswith('~'):
                return re.search(pattern_str[1:], string)
            else:
                return fnmatch.fnmatch(string, pattern_str)
        except Exception as e:
            raise AnsibleError('invalid host pattern (%s): %s' % (pattern_str, str(e)))

    def _match_list(self, items, item_attr, pattern_str):
        results = []
        try:
            if not pattern_str.startswith('~'):
                pattern = re.compile(fnmatch.translate(pattern_str))
            else:
                pattern = re.compile(pattern_str[1:])
        except Exception:
            raise AnsibleError('invalid host list pattern: %s' % pattern_str)

        for item in items:
            if pattern.match(getattr(item, item_attr)):
                results.append(item)
        return results

    def get_hosts(self, pattern="all", ignore_limits=False, ignore_restrictions=False, order=None):
        """
        Takes a pattern or list of patterns and returns a list of matching
        inventory host names, taking into account any active restrictions
        or applied subsets
        """

        # Check if pattern already computed
        if isinstance(pattern, list):
            pattern_hash = u":".join(pattern)
        else:
            pattern_hash = pattern

        if not ignore_limits and self._subset:
            pattern_hash += u":%s" % to_text(self._subset)

        if not ignore_restrictions and self._restriction:
            pattern_hash += u":%s" % to_text(self._restriction)

        if pattern_hash not in HOSTS_PATTERNS_CACHE:

            patterns = split_host_pattern(pattern)
            hosts = self._evaluate_patterns(patterns)

            # mainly useful for hostvars[host] access
            if not ignore_limits and self._subset:
                # exclude hosts not in a subset, if defined
                subset = self._evaluate_patterns(self._subset)
                hosts = [h for h in hosts if h in subset]

            if not ignore_restrictions and self._restriction:
                # exclude hosts mentioned in any restriction (ex: failed hosts)
                hosts = [h for h in hosts if h.name in self._restriction]

            seen = set()
            HOSTS_PATTERNS_CACHE[pattern_hash] = [x for x in hosts if x not in seen and not seen.add(x)]

        # sort hosts list if needed (should only happen when called from strategy)
        if order in ['sorted', 'reverse_sorted']:
            from operator import attrgetter
            hosts = sorted(HOSTS_PATTERNS_CACHE[pattern_hash][:], key=attrgetter('name'), reverse=(order == 'reverse_sorted'))
        elif order == 'reverse_inventory':
            hosts = sorted(HOSTS_PATTERNS_CACHE[pattern_hash][:], reverse=True)
        else:
            hosts = HOSTS_PATTERNS_CACHE[pattern_hash][:]
            if order == 'shuffle':
                from random import shuffle
                shuffle(hosts)
            elif order not in [None, 'inventory']:
                raise AnsibleOptionsError("Invalid 'order' specified for inventory hosts: %s" % order)

        return hosts
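
    # Illustrative example (not part of the original file), where 'im' is an
    # InventoryManager instance and the group/host names are made up:
    #
    #   im.get_hosts('webservers:&staging:!web03')
    #   # -> hosts in both 'webservers' and 'staging', minus 'web03',
    #   #    further filtered by any active --limit subset or restriction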

    def _evaluate_patterns(self, patterns):
        """
        Takes a list of patterns and returns a list of matching host names,
        taking into account any negative and intersection patterns.
        """

        patterns = order_patterns(patterns)
        hosts = []

        for p in patterns:
            # avoid resolving a pattern that is a plain host
            if p in self._inventory.hosts:
                hosts.append(self._inventory.get_host(p))
            else:
                that = self._match_one_pattern(p)
                if p.startswith("!"):
                    hosts = [h for h in hosts if h not in that]
                elif p.startswith("&"):
                    hosts = [h for h in hosts if h in that]
                else:
                    to_append = [h for h in that if h.name not in [y.name for y in hosts]]
                    hosts.extend(to_append)
        return hosts

    def _match_one_pattern(self, pattern):
        """
        Takes a single pattern and returns a list of matching host names.
        Ignores intersection (&) and exclusion (!) specifiers.

        The pattern may be:

            1. A regex starting with ~, e.g. '~[abc]*'
            2. A shell glob pattern with ?/*/[chars]/[!chars], e.g. 'foo*'
            3. An ordinary word that matches itself only, e.g. 'foo'

        The pattern is matched using the following rules:

            1. If it's 'all', it matches all hosts in all groups.
            2. Otherwise, for each known group name:
                (a) if it matches the group name, the results include all hosts
                    in the group or any of its children.
                (b) otherwise, if it matches any hosts in the group, the results
                    include the matching hosts.

        This means that 'foo*' may match one or more groups (thus including all
        hosts therein) but also hosts in other groups.

        The built-in groups 'all' and 'ungrouped' are special. No pattern can
        match these group names (though 'all' behaves as though it matches, as
        described above). The word 'ungrouped' can match a host of that name,
        and patterns like 'ungr*' and 'al*' can match either hosts or groups
        other than all and ungrouped.

        If the pattern matches one or more group names according to these rules,
        it may have an optional range suffix to select a subset of the results.
        This is allowed only if the pattern is not a regex, i.e. '~foo[1]' does
        not work (the [1] is interpreted as part of the regex), but 'foo*[1]'
        would work if 'foo*' matched the name of one or more groups.

        Duplicate matches are always eliminated from the results.
        """

        if pattern.startswith("&") or pattern.startswith("!"):
            pattern = pattern[1:]

        if pattern not in self._pattern_cache:
            (expr, slice) = self._split_subscript(pattern)
            hosts = self._enumerate_matches(expr)
            try:
                hosts = self._apply_subscript(hosts, slice)
            except IndexError:
                raise AnsibleError("No hosts matched the subscripted pattern '%s'" % pattern)
            self._pattern_cache[pattern] = hosts

        return self._pattern_cache[pattern]
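
    # Illustrative examples (not part of the original file), assuming groups
    # named 'webservers' and 'dbservers' exist:
    #
    #   self._match_one_pattern('webservers')     # every host in that group
    #   self._match_one_pattern('~(web|db).*')    # regex over group/host names
    #   self._match_one_pattern('webservers[0]')  # only the group's first host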

    def _split_subscript(self, pattern):
        """
        Takes a pattern, checks if it has a subscript, and returns the pattern
        without the subscript and a (start,end) tuple representing the given
        subscript (or None if there is no subscript).

        Validates that the subscript is in the right syntax, but doesn't make
        sure the actual indices make sense in context.
        """

        # Do not parse regexes for enumeration info
        if pattern.startswith('~'):
            return (pattern, None)

        # We want a pattern followed by an integer or range subscript.
        # (We can't be more restrictive about the expression because the
        # fnmatch semantics permit [\[:\]] to occur.)

        pattern_with_subscript = re.compile(
            r'''^
                (.+)                # A pattern expression ending with...
                \[(?:               # A [subscript] expression comprising:
                    (-?[0-9]+)|     # A single positive or negative number
                    ([0-9]+)([:-])  # Or an x:y or x: range.
                    ([0-9]*)
                )\]
                $
            ''', re.X
        )

        subscript = None
        m = pattern_with_subscript.match(pattern)
        if m:
            (pattern, idx, start, sep, end) = m.groups()
            if idx:
                subscript = (int(idx), None)
            else:
                if not end:
                    end = -1
                subscript = (int(start), int(end))
            if sep == '-':
                display.warning("Use [x:y] inclusive subscripts instead of [x-y] which has been removed")

        return (pattern, subscript)

    def _apply_subscript(self, hosts, subscript):
        """
        Takes a list of hosts and a (start,end) tuple and returns the subset of
        hosts based on the subscript (which may be None to return all hosts).
        """

        if not hosts or not subscript:
            return hosts

        (start, end) = subscript

        if end:
            if end == -1:
                end = len(hosts) - 1
            return hosts[start:end + 1]
        else:
            return [hosts[start]]
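
    # Illustrative examples (not part of the original file): subscripts are
    # inclusive and may be a single index or an x:y range:
    #
    #   self._split_subscript('webservers[0]')    # -> ('webservers', (0, None))
    #   self._split_subscript('webservers[2:5]')  # -> ('webservers', (2, 5))
    #   self._apply_subscript(hosts, (2, 5))      # -> hosts[2:6], items 2..5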

    def _enumerate_matches(self, pattern):
        """
        Returns a list of host names matching the given pattern according to the
        rules explained above in _match_one_pattern.
        """

        results = []

        def __append_host_to_results(host):
            if host.name not in results:
                if not host.implicit:
                    results.append(host)

        matched = False
        for group in self._inventory.groups.values():
            if self._match(to_text(group.name), pattern):
                matched = True
                for host in group.get_hosts():
                    __append_host_to_results(host)
            else:
                matching_hosts = self._match_list(group.get_hosts(), 'name', pattern)
                if matching_hosts:
                    matched = True
                    for host in matching_hosts:
                        __append_host_to_results(host)

        if not results and pattern in C.LOCALHOST:
            # get_host autocreates implicit when needed
            implicit = self._inventory.get_host(pattern)
            if implicit:
                results.append(implicit)
                matched = True

        if not matched:
            display.warning("Could not match supplied host pattern, ignoring: %s" % pattern)
        return results

    def list_hosts(self, pattern="all"):
        """ return a list of hostnames for a pattern """
        # FIXME: cache?
        result = [h for h in self.get_hosts(pattern)]

        # allow implicit localhost if pattern matches and no other results
        if len(result) == 0 and pattern in C.LOCALHOST:
            result = [pattern]

        return result

    def list_groups(self):
        # FIXME: cache?
        return sorted(self._inventory.groups.keys(), key=lambda x: x)

    def restrict_to_hosts(self, restriction):
        """
        Restrict list operations to the hosts given in restriction. This is used
        to batch serial operations in main playbook code, don't use this for other
        reasons.
        """
        if restriction is None:
            return
        elif not isinstance(restriction, list):
            restriction = [restriction]
        self._restriction = [h.name for h in restriction]

    def subset(self, subset_pattern):
        """
        Limits inventory results to a subset of inventory that matches a given
        pattern, such as to select a given geographic or numeric slice amongst
        a previous 'hosts' selection that only selects roles, or vice versa.
        Corresponds to the --limit parameter of ansible-playbook.
        """
        if subset_pattern is None:
            self._subset = None
        else:
            subset_patterns = split_host_pattern(subset_pattern)
            results = []
            # allow Unix style @filename data
            for x in subset_patterns:
                if x.startswith("@"):
                    fd = open(x[1:])
                    results.extend(fd.read().split("\n"))
                    fd.close()
                else:
                    results.append(x)
            self._subset = results
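
    # Illustrative examples (not part of the original file), where 'im' is an
    # InventoryManager instance and the names and file path are made up:
    #
    #   im.subset('webservers:!web03')   # limit to webservers except web03
    #   im.subset('@retry_hosts.txt')    # read host names from a file
    #   im.subset(None)                  # clear the limit again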

    def remove_restriction(self):
        """ Do not restrict list operations """
        self._restriction = None

    def clear_pattern_cache(self):
        self._pattern_cache = {}

@ -1,170 +0,0 @@
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

#############################################
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import subprocess
import sys
from collections import Mapping

from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.inventory.host import Host
from ansible.inventory.group import Group
from ansible.module_utils.basic import json_dict_bytes_to_unicode
from ansible.module_utils.six import iteritems
from ansible.module_utils._text import to_native, to_text


class InventoryScript:
    ''' Host inventory parser for ansible using external inventory scripts. '''

    def __init__(self, loader, groups=None, filename=C.DEFAULT_HOST_LIST):
        if groups is None:
            groups = dict()

        self._loader = loader
        self.groups = groups

        # Support inventory scripts that are not prefixed with some
        # path information but happen to be in the current working
        # directory when '.' is not in PATH.
        self.filename = os.path.abspath(filename)
        cmd = [self.filename, "--list"]
        try:
            sp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        except OSError as e:
            raise AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
        (stdout, stderr) = sp.communicate()

        if sp.returncode != 0:
            raise AnsibleError("Inventory script (%s) had an execution error: %s" % (filename, stderr))

        # make sure script output is unicode so that json loader will output
        # unicode strings itself
        try:
            self.data = to_text(stdout, errors="strict")
        except Exception as e:
            raise AnsibleError("inventory data from {0} contained characters that cannot be interpreted as UTF-8: {1}".format(to_native(self.filename),
                                                                                                                              to_native(e)))

        # see comment about _meta below
        self.host_vars_from_top = None
        self._parse(stderr)

    def _parse(self, err):

        all_hosts = {}

        # not passing from_remote because data from CMDB is trusted
        try:
            self.raw = self._loader.load(self.data)
        except Exception as e:
            sys.stderr.write(to_native(err) + "\n")
            raise AnsibleError("failed to parse executable inventory script results from {0}: {1}".format(to_native(self.filename), to_native(e)))

        if not isinstance(self.raw, Mapping):
            sys.stderr.write(to_native(err) + "\n")
            raise AnsibleError("failed to parse executable inventory script results from {0}: data needs to be formatted "
                               "as a json dict".format(to_native(self.filename)))

        group = None
        for (group_name, data) in self.raw.items():

            # in Ansible 1.3 and later, a "_meta" subelement may contain
            # a variable "hostvars" which contains a hash for each host
            # if this "hostvars" exists at all then do not call --host for each
            # host. This is for efficiency and scripts should still return data
            # if called with --host for backwards compat with 1.2 and earlier.
            if group_name == '_meta':
                if 'hostvars' in data:
                    self.host_vars_from_top = data['hostvars']
                    continue

            if group_name not in self.groups:
                group = self.groups[group_name] = Group(group_name)

            group = self.groups[group_name]
            host = None

            if not isinstance(data, dict):
                data = {'hosts': data}
            # if it has none of those subkeys, it is the simplified syntax: a host with vars
            elif not any(k in data for k in ('hosts', 'vars', 'children')):
                data = {'hosts': [group_name], 'vars': data}

            if 'hosts' in data:
                if not isinstance(data['hosts'], list):
                    raise AnsibleError("You defined a group \"%s\" with bad "
                                       "data for the host list:\n %s" % (group_name, data))

                for hostname in data['hosts']:
                    if hostname not in all_hosts:
                        all_hosts[hostname] = Host(hostname)
                    host = all_hosts[hostname]
                    group.add_host(host)

            if 'vars' in data:
                if not isinstance(data['vars'], dict):
                    raise AnsibleError("You defined a group \"%s\" with bad "
                                       "data for variables:\n %s" % (group_name, data))

                for k, v in iteritems(data['vars']):
                    group.set_variable(k, v)

        # Separate loop to ensure all groups are defined
        for (group_name, data) in self.raw.items():
            if group_name == '_meta':
                continue
            if isinstance(data, dict) and 'children' in data:
                for child_name in data['children']:
                    if child_name in self.groups:
                        self.groups[group_name].add_child_group(self.groups[child_name])

        # Finally, add all top-level groups as children of 'all'.
        # We exclude ungrouped here because it was already added as a child of
        # 'all' at the time it was created.
        for group in self.groups.values():
            if group.depth == 0 and group.name not in ('all', 'ungrouped'):
                self.groups['all'].add_child_group(group)
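
    # Illustrative sketch (not part of the original file) of the JSON shape an
    # inventory script is expected to print for --list; the group and host
    # names are made up:
    #
    #   {
    #       "_meta": {"hostvars": {"web01": {"ansible_host": "10.0.0.5"}}},
    #       "webservers": {"hosts": ["web01"], "vars": {"http_port": 80}},
    #       "atlanta": {"children": ["webservers"]}
    #   }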

    def get_host_variables(self, host):
        """ Runs <script> --host <hostname> to determine additional host variables """
        if self.host_vars_from_top is not None:
            try:
                got = self.host_vars_from_top.get(host.name, {})
            except AttributeError as e:
                raise AnsibleError("Improperly formatted host information for %s: %s" % (host.name, to_native(e)))
            return got

        cmd = [self.filename, "--host", host.name]
        try:
            sp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        except OSError as e:
            raise AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
        (out, err) = sp.communicate()
        if out.strip() == '':
            return dict()
        try:
            return json_dict_bytes_to_unicode(self._loader.load(out))
        except ValueError:
            raise AnsibleError("could not parse post variable response: %s, %s" % (cmd, out))

@ -1,4 +0,0 @@
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

@ -1,50 +0,0 @@
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2014, Serge van Ginderachter <serge@vanginderachter.be>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


class VarsModule(object):

    """
    Loads variables for groups and/or hosts
    """

    def __init__(self, inventory):

        """ constructor """

        self.inventory = inventory
        self.inventory_basedir = inventory.basedir()

    def run(self, host, vault_password=None):
        """ For backwards compatibility, when only vars per host were retrieved
            This method should return both host specific vars as well as vars
            calculated from groups it is a member of """
        return {}

    def get_host_vars(self, host, vault_password=None):
        """ Get host specific variables. """
        return {}

    def get_group_vars(self, group, vault_password=None):
        """ Get group specific variables. """
        return {}

@ -1,170 +0,0 @@
# Copyright 2016 RedHat, inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

#############################################
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import re

from ansible import constants as C
from ansible.inventory.host import Host
from ansible.inventory.group import Group
from ansible.inventory.expand_hosts import detect_range
from ansible.inventory.expand_hosts import expand_hostname_range
from ansible.module_utils.six import string_types
from ansible.parsing.utils.addresses import parse_address


class InventoryParser(object):
    """
    Takes a YAML-format inventory file and builds a list of groups and subgroups
    with their associated hosts and variable settings.
    """

    def __init__(self, loader, groups, filename=C.DEFAULT_HOST_LIST):
        self._loader = loader
        self.filename = filename

        # Start with an empty host list and whatever groups we're passed in
        # (which should include the default 'all' and 'ungrouped' groups).

        self.hosts = {}
        self.patterns = {}
        self.groups = groups

        # Read in the hosts, groups, and variables defined in the
        # inventory file.
        data = loader.load_from_file(filename)

        self._parse(data)

    def _parse(self, data):
        '''
        Populates self.groups from the given parsed data. Raises an error on
        any parse failure.
        '''

        self._compile_patterns()

        # We expect top level keys to correspond to groups, iterate over them
        # to get hosts, vars and subgroups (which we iterate over recursively)
        for group_name in data.keys():
            self._parse_groups(group_name, data[group_name])

        # Finally, add all top-level groups as children of 'all'.
        # We exclude ungrouped here because it was already added as a child of
        # 'all' at the time it was created.
        for group in self.groups.values():
            if group.depth == 0 and group.name not in ('all', 'ungrouped'):
                self.groups['all'].add_child_group(group)

    def _parse_groups(self, group, group_data):

        if group not in self.groups:
            self.groups[group] = Group(name=group)

        if isinstance(group_data, dict):
            # make sure they are dicts
            for section in ['vars', 'children', 'hosts']:
                if section in group_data and isinstance(group_data[section], string_types):
                    group_data[section] = {group_data[section]: None}

            if 'vars' in group_data:
                for var in group_data['vars']:
                    self.groups[group].set_variable(var, group_data['vars'][var])

            if 'children' in group_data:
                for subgroup in group_data['children']:
                    self._parse_groups(subgroup, group_data['children'][subgroup])
                    self.groups[group].add_child_group(self.groups[subgroup])

            if 'hosts' in group_data:
                for host_pattern in group_data['hosts']:
                    hosts = self._parse_host(host_pattern, group_data['hosts'][host_pattern])
                    for h in hosts:
                        self.groups[group].add_host(h)
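
    # Illustrative sketch (not part of the original file) of the YAML layout
    # this parser consumes; the names are made up. Top-level keys are groups,
    # each of which may carry 'hosts', 'vars' and 'children':
    #
    #   webservers:
    #     hosts:
    #       web[01:03].example.com:
    #         http_port: 8080
    #     vars:
    #       ntp_server: time.example.com
    #     children:
    #       atlanta:
    #         hosts:
    #           web-atl01.example.com: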

    def _parse_host(self, host_pattern, host_data):
        '''
        Each host key can be a pattern, try to process it and add variables as needed
        '''
        (hostnames, port) = self._expand_hostpattern(host_pattern)
        hosts = self._Hosts(hostnames, port)

        if isinstance(host_data, dict):
            for k in host_data:
                for h in hosts:
                    h.set_variable(k, host_data[k])
                    if k in ['ansible_host', 'ansible_ssh_host']:
                        h.address = host_data[k]
        return hosts

    def _expand_hostpattern(self, hostpattern):
        '''
        Takes a single host pattern and returns a list of hostnames and an
        optional port number that applies to all of them.
        '''

        # Can the given hostpattern be parsed as a host with an optional port
        # specification?

        try:
            (pattern, port) = parse_address(hostpattern, allow_ranges=True)
        except:
            # not a recognizable host pattern
            pattern = hostpattern
            port = None

        # Once we have separated the pattern, we expand it into list of one or
        # more hostnames, depending on whether it contains any [x:y] ranges.

        if detect_range(pattern):
            hostnames = expand_hostname_range(pattern)
        else:
            hostnames = [pattern]

        return (hostnames, port)

    def _Hosts(self, hostnames, port):
        '''
        Takes a list of hostnames and a port (which may be None) and returns a
        list of Hosts (without recreating anything in self.hosts).
        '''

        hosts = []

        # Note that we decide whether or not to create a Host based solely on
        # the (non-)existence of its hostname in self.hosts. This means that one
        # cannot add both "foo:22" and "foo:23" to the inventory.

        for hn in hostnames:
            if hn not in self.hosts:
                self.hosts[hn] = Host(name=hn, port=port)
            hosts.append(self.hosts[hn])

        return hosts

    def get_host_variables(self, host):
        return {}

    def _compile_patterns(self):
        '''
        Compiles the regular expressions required to parse the inventory and stores them in self.patterns.
        '''
        self.patterns['groupname'] = re.compile(r'''^[A-Za-z_][A-Za-z0-9_]*$''')

@ -0,0 +1,103 @@
# Copyright 2017 RedHat, inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#############################################
'''
DOCUMENTATION:
    inventory: advanced_host_list
    version_added: "2.4"
    short_description: Parses a 'host list' with ranges
    description:
        - Parses a host list string as a comma-separated list of hosts and supports host ranges.
        - This plugin only applies to inventory sources that are not paths and contain at least one comma.
EXAMPLES:
    # simple range
    ansible -i 'host[1:10],' -m ping

    # still works without ranges as well
    ansible-playbook -i 'localhost,' play.yml
'''

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os

from ansible.errors import AnsibleError, AnsibleParserError
from ansible.module_utils._text import to_bytes, to_text, to_native
from ansible.parsing.utils.addresses import parse_address
from ansible.plugins.inventory import BaseInventoryPlugin, detect_range, expand_hostname_range


class InventoryModule(BaseInventoryPlugin):

    NAME = 'advanced_host_list'

    def verify_file(self, host_list):

        valid = False
        b_path = to_bytes(host_list)
        if not os.path.exists(b_path) and ',' in host_list:
            valid = True
        return valid

    def parse(self, inventory, loader, host_list, cache=True):
        ''' parses the inventory file '''

        super(InventoryModule, self).parse(inventory, loader, host_list)

        try:
            for h in host_list.split(','):
                if h:
                    try:
                        (hostnames, port) = self._expand_hostpattern(h)
                    except AnsibleError as e:
                        self.display.vvv("Unable to parse address from hostname, leaving unchanged: %s" % to_native(e))
                        hostnames = [h]
                        port = None

                    for host in hostnames:
                        if host not in self.inventory.hosts:
                            self.inventory.add_host(host, group='ungrouped', port=port)
                else:
                    self.display.warning("Skipping invalid hostname: %s" % to_text(h))
        except Exception as e:
            raise AnsibleParserError("Invalid data from string, could not parse: %s" % str(e))

    def _expand_hostpattern(self, hostpattern):
        '''
        Takes a single host pattern and returns a list of hostnames and an
        optional port number that applies to all of them.
        '''
        # Can the given hostpattern be parsed as a host with an optional port
        # specification?

        try:
            (pattern, port) = parse_address(hostpattern, allow_ranges=True)
        except:
            # not a recognizable host pattern
            pattern = hostpattern
            port = None

        # Once we have separated the pattern, we expand it into list of one or
        # more hostnames, depending on whether it contains any [x:y] ranges.

        if detect_range(pattern):
            hostnames = expand_hostname_range(pattern)
        else:
            hostnames = [pattern]

        return (hostnames, port)
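
    # Illustrative examples (not part of the original file):
    #
    #   self._expand_hostpattern('host[1:3]')
    #   # -> (['host1', 'host2', 'host3'], None)
    #
    #   self._expand_hostpattern('host1:2222')
    #   # -> (['host1'], 2222)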

@ -0,0 +1,78 @@
# Copyright 2017 RedHat, inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
'''
DOCUMENTATION:
    inventory: host_list
    version_added: "2.4"
    short_description: Parses a 'host list' string
    description:
        - Parses a host list string as a comma-separated list of hosts
        - This plugin only applies to inventory strings that are not paths and contain a comma.
EXAMPLES: |
    # define 2 hosts in command line
    ansible -i '10.10.2.6, 10.10.2.4' -m ping all

    # DNS resolvable names
    ansible -i 'host1.example.com, host2' -m user -a 'name=me state=absent' all

    # just use localhost
    ansible-playbook -i 'localhost,' play.yml -c local
'''

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os

from ansible.errors import AnsibleError, AnsibleParserError
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_bytes, to_text, to_native
from ansible.parsing.utils.addresses import parse_address
from ansible.plugins.inventory import BaseInventoryPlugin


class InventoryModule(BaseInventoryPlugin):

    NAME = 'host_list'

    def verify_file(self, host_list):

        valid = False
        b_path = to_bytes(host_list)
        if not os.path.exists(b_path) and ',' in host_list:
            valid = True
        return valid

    def parse(self, inventory, loader, host_list, cache=True):
        ''' parses the inventory file '''

        super(InventoryModule, self).parse(inventory, loader, host_list)

        try:
            for h in host_list.split(','):
                if h:
                    try:
                        (host, port) = parse_address(h, allow_ranges=False)
                    except AnsibleError as e:
                        self.display.vvv("Unable to parse address from hostname, leaving unchanged: %s" % to_native(e))
                        host = h
                        port = None

                    if host not in self.inventory.hosts:
                        self.inventory.add_host(host, group='ungrouped', port=port)
        except Exception as e:
            raise AnsibleParserError("Invalid data from string, could not parse: %s" % str(e))
@ -0,0 +1,188 @@
|
|||||||
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||||
|
#
|
||||||
|
# This file is part of Ansible
|
||||||
|
#
|
||||||
|
# Ansible is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# Ansible is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
'''
|
||||||
|
DOCUMENTATION:
|
||||||
|
inventory: script
|
||||||
|
version_added: "2.4"
|
||||||
|
short_description: Executes an inventory script that returns JSON
|
||||||
|
description:
|
||||||
|
- The source provided must an executable that returns Ansible inventory JSON
|
||||||
|
- The source must accept C(--list) and C(--host <hostname>) as arguments.
|
||||||
|
C(--host) will only be used if no C(_meta) key is present (performance optimization)
|
||||||
|
notes:
|
||||||
|
- It takes the place of the previously hardcoded script inventory.
|
||||||
|
- To function it requires being whitelisted in configuration, which is true by default.
|
||||||
|
'''
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
from collections import Mapping
|
||||||
|
|
||||||
|
from ansible.errors import AnsibleError, AnsibleParserError
|
||||||
|
from ansible.module_utils.basic import json_dict_bytes_to_unicode
|
||||||
|
from ansible.module_utils.six import iteritems
|
||||||
|
from ansible.module_utils._text import to_native, to_text
|
||||||
|
from ansible.plugins.inventory import BaseInventoryPlugin
|
||||||
|
|
||||||
|
|
||||||
|
class InventoryModule(BaseInventoryPlugin):
|
||||||
|
''' Host inventory parser for ansible using external inventory scripts. '''
|
||||||
|
|
||||||
|
NAME = 'script'
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
|
||||||
|
super(InventoryModule, self).__init__()
|
||||||
|
|
||||||
|
self._hosts = set()
|
||||||
|
|
||||||
|
def verify_file(self, path):
|
||||||
|
''' Verify if file is usable by this plugin, base does minimal accesability check '''
|
||||||
|
|
||||||
|
valid = super(InventoryModule, self).verify_file(path)
|
||||||
|
|
||||||
|
if valid:
|
||||||
|
# not only accessible, file must be executable and/or have shebang
|
||||||
|
shebang_present = False
|
||||||
|
try:
|
||||||
|
with open(path, 'rb') as inv_file:
|
||||||
|
initial_chars = inv_file.read(2)
|
||||||
|
if initial_chars.startswith(b'#!'):
|
||||||
|
shebang_present = True
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
if not os.access(path, os.X_OK) and not shebang_present:
|
||||||
|
valid = False
|
||||||
|
|
||||||
|
return valid
|
||||||
|
|
||||||
|
def parse(self, inventory, loader, path, cache=True):
|
||||||
|
|
||||||
|
super(InventoryModule, self).parse(inventory, loader, path)
|
||||||
|
|
||||||
|
# Support inventory scripts that are not prefixed with some
|
||||||
|
# path information but happen to be in the current working
|
||||||
|
# directory when '.' is not in PATH.
|
||||||
|
path = os.path.abspath(path)
|
||||||
|
cmd = [ path, "--list" ]
|
||||||
|
try:
|
||||||
|
try:
|
||||||
|
sp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
|
except OSError as e:
|
||||||
|
raise AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
|
||||||
|
(stdout, stderr) = sp.communicate()
|
||||||
|
|
||||||
|
path = to_native(path)
|
||||||
|
if stderr:
|
||||||
|
err = to_native(stderr) + "\n"
|
||||||
|
|
||||||
|
if sp.returncode != 0:
|
||||||
|
raise AnsibleError("Inventory script (%s) had an execution error: %s " % (path, err))
|
||||||
|
|
||||||
|
# make sure script output is unicode so that json loader will output
|
||||||
|
# unicode strings itself
|
||||||
|
try:
|
||||||
|
data = to_text(stdout, errors="strict")
|
||||||
|
except Exception as e:
|
||||||
|
raise AnsibleError("Inventory {0} contained characters that cannot be interpreted as UTF-8: {1}".format(path, to_native(e)))
|
||||||
|
|
||||||
|
try:
|
||||||
|
processed = self.loader.load(data)
|
||||||
|
except Exception as e:
|
||||||
|
raise AnsibleError("failed to parse executable inventory script results from {0}: {1}\n{2}".format(path, to_native(e), err))
|
||||||
|
|
||||||
|
if not isinstance(processed, Mapping):
|
||||||
|
raise AnsibleError("failed to parse executable inventory script results from {0}: needs to be a json dict\n{1}".format(path, err))
|
||||||
|
|
||||||
|
group = None
|
||||||
|
data_from_meta = None
|
||||||
|
for (group, gdata) in data.items():
|
||||||
|
if group == '_meta':
|
||||||
|
if 'hostvars' in data:
|
||||||
|
data_from_meta = data['hostvars']
|
||||||
|
else:
|
||||||
|
self.parse_group(group, gdata)
|
||||||
|
|
||||||
|
# in Ansible 1.3 and later, a "_meta" subelement may contain
|
||||||
|
# a variable "hostvars" which contains a hash for each host
|
||||||
|
# if this "hostvars" exists at all then do not call --host for each
|
||||||
|
# host. This is for efficiency and scripts should still return data
|
||||||
|
# if called with --host for backwards compat with 1.2 and earlier.
|
||||||
|
for host in self._hosts:
|
||||||
|
got = {}
|
||||||
|
if data_from_meta is None:
|
||||||
|
got = self.get_host_variables(path, host, data_from_meta)
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
got = data.get(host, {})
|
||||||
|
except AttributeError as e:
|
||||||
|
raise AnsibleError("Improperly formatted host information for %s: %s" % (host,to_native(e)))
|
||||||
|
|
||||||
|
self.populate_host_vars(host, got, group)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
raise AnsibleParserError(e)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_group(self, group, data):
|
||||||
|
|
||||||
|
self.inventory.add_group(group)
|
||||||
|
|
||||||
|
if not isinstance(data, dict):
|
||||||
|
data = {'hosts': data}
|
||||||
|
# is not those subkeys, then simplified syntax, host with vars
|
||||||
|
elif not any(k in data for k in ('hosts','vars','children')):
|
||||||
|
data = {'hosts': [group], 'vars': data}
|
||||||
|
|
||||||
|
if 'hosts' in data:
|
||||||
|
if not isinstance(data['hosts'], list):
|
||||||
|
raise AnsibleError("You defined a group '%s' with bad data for the host list:\n %s" % (group, data))
|
||||||
|
|
||||||
|
for hostname in data['hosts']:
|
||||||
|
self._hosts.add(hostname)
|
||||||
|
self.inventory.add_host(hostname, group)
|
||||||
|
|
||||||
|
if 'vars' in data:
|
||||||
|
if not isinstance(data['vars'], dict):
|
||||||
|
raise AnsibleError("You defined a group '%s' with bad data for variables:\n %s" % (group, data))
|
||||||
|
|
||||||
|
for k, v in iteritems(data['vars']):
|
||||||
|
self.inventory.set_variable(group, k, v)
|
||||||
|
|
||||||
|
if group != 'meta' and isinstance(data, dict) and 'children' in data:
|
||||||
|
for child_name in data['children']:
|
||||||
|
self.inventory.add_group(child_name)
|
||||||
|
self.inventory.add_child(group, child_name)
|
||||||
|
|
||||||
|
def get_host_variables(self, path, host):
|
||||||
|
""" Runs <script> --host <hostname>, to determine additional host variables """
|
||||||
|
|
||||||
|
cmd = [path, "--host", host]
|
||||||
|
try:
|
||||||
|
sp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
|
except OSError as e:
|
||||||
|
raise AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
|
||||||
|
(out, err) = sp.communicate()
|
||||||
|
if out.strip() == '':
|
||||||
|
return {}
|
||||||
|
try:
|
||||||
|
return json_dict_bytes_to_unicode(self.loader.load(out))
|
||||||
|
except ValueError:
|
||||||
|
raise AnsibleError("could not parse post variable response: %s, %s" % (cmd, out))
|
@ -0,0 +1,169 @@
|
|||||||
|
# This file is part of Ansible,
|
||||||
|
# (c) 2012-2017, Michael DeHaan <michael.dehaan@gmail.com>
|
||||||
|
#
|
||||||
|
# This file is part of Ansible
|
||||||
|
#
|
||||||
|
# Ansible is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# Ansible is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
#############################################
|
||||||
|
'''
|
||||||
|
DOCUMENTATION:
|
||||||
|
name: virtualbox
|
||||||
|
plugin_type: inventory
|
||||||
|
short_description: virtualbox inventory source
|
||||||
|
description:
|
||||||
|
- Get inventory hosts from the local virtualbox installation.
|
||||||
|
- Uses a <name>.vbox.conf YAML configuration file.
|
||||||
|
options:
|
||||||
|
running_only:
|
||||||
|
description: toggles showing all vms vs only those currently running
|
||||||
|
default: False
|
||||||
|
settings_password_file:
|
||||||
|
description: provide a file containing the settings password (equivalent to --settingspwfile)
|
||||||
|
network_info_path:
|
||||||
|
description: property path to query for network information (ansible_host)
|
||||||
|
default: "/VirtualBox/GuestInfo/Net/0/V4/IP"
|
||||||
|
'''
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
from subprocess import Popen, PIPE
|
||||||
|
|
||||||
|
from ansible.errors import AnsibleParserError
|
||||||
|
from ansible.module_utils._text import to_bytes
|
||||||
|
from ansible.plugins.inventory import BaseInventoryPlugin
|
||||||
|
|
||||||
|
|
||||||
|
class InventoryModule(BaseInventoryPlugin):
|
||||||
|
''' Host inventory parser for ansible using external inventory scripts. '''
|
||||||
|
|
||||||
|
NAME = 'virtualbox'
|
||||||
|
|
||||||
|
def verify_file(self, path):
|
||||||
|
|
||||||
|
valid = False
|
||||||
|
if super(InventoryModule, self).verify_file(path):
|
||||||
|
if path.endswith('.vbox.conf'):
|
||||||
|
valid = True
|
||||||
|
return valid
|
||||||
|
|
||||||
|
def parse(self, inventory, loader, path, cache=True):
|
||||||
|
|
||||||
|
super(InventoryModule, self).parse(inventory, loader, path)
|
||||||
|
|
||||||
|
VBOX = "VBoxManage"
|
||||||
|
cache_key = self.get_cache_prefix(path)
|
||||||
|
|
||||||
|
if cache and cache_key not in inventory.cache:
|
||||||
|
source_data = inventory.cache[cache_key]
|
||||||
|
else:
|
||||||
|
# file is config file
|
||||||
|
try:
|
||||||
|
data = self.loader.load_from_file(path)
|
||||||
|
except Exception as e:
|
||||||
|
raise AnsibleParserError(e)
|
||||||
|
|
||||||
|
if not data or data.get('plugin') != self.NAME:
|
||||||
|
return False
|
||||||
|
|
||||||
|
pwfile = to_bytes(data.get('settings_password_file'))
|
||||||
|
netinfo = data.get('network_info_path', "/VirtualBox/GuestInfo/Net/0/V4/IP")
|
||||||
|
running = data.get('running_only', False)
|
||||||
|
|
||||||
|
# start getting data
|
||||||
|
cmd = [VBOX, 'list', '-l']
|
||||||
|
if running:
|
||||||
|
cmd.append('runningvms')
|
||||||
|
else:
|
||||||
|
cmd.append('vms')
|
||||||
|
|
||||||
|
if pwfile and os.path.exists(pwfile):
|
||||||
|
cmd.append('--settingspwfile')
|
||||||
|
cmd.append(pwfile)
|
||||||
|
|
||||||
|
try:
|
||||||
|
p = Popen(cmd, stdout=PIPE)
|
||||||
|
except Exception as e:
|
||||||
|
AnsibleParserError(e)
|
||||||
|
|
||||||
|
hostvars = {}
|
||||||
|
prevkey = pref_k = ''
|
||||||
|
current_host = None
|
||||||
|
|
||||||
|
source_data = p.stdout.readlines()
|
||||||
|
inventory.cache[cache_key] = source_data
|
||||||
|
|
||||||
|
for line in source_data:
|
||||||
|
|
||||||
|
try:
|
||||||
|
k, v = line.split(':', 1)
|
||||||
|
except:
|
||||||
|
# skip non splitable
|
||||||
|
continue
|
||||||
|
|
||||||
|
if k.strip() == '':
|
||||||
|
# skip empty
|
||||||
|
continue
|
||||||
|
|
||||||
|
v = v.strip()
|
||||||
|
# found host
|
||||||
|
if k.startswith('Name') and ',' not in v: # some setting strings appear in Name
|
||||||
|
current_host = v
|
||||||
|
if current_host not in hostvars:
|
||||||
|
hostvars[current_host] = {}
|
||||||
|
self.inventory.add_host(current_host)
|
||||||
|
# try to get network info
|
||||||
|
try:
|
||||||
|
cmd = [VBOX, 'guestproperty', 'get', current_host, netinfo]
|
||||||
|
if args:
|
||||||
|
cmd.append(args)
|
||||||
|
x = Popen(cmd, stdout=PIPE)
|
||||||
|
ipinfo = x.stdout.read()
|
||||||
|
if 'Value' in ipinfo:
|
||||||
|
a, ip = ipinfo.split(':', 1)
|
||||||
|
self.inventory.set_variable(current_host, 'ansible_host', ip.strip())
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# found groups
|
||||||
|
elif k == 'Groups':
|
||||||
|
for group in v.split('/'):
|
||||||
|
if group:
|
||||||
|
self.inventory.add_group(group)
|
||||||
|
self.inventory.add_child(group, current_host)
|
||||||
|
continue
|
||||||
|
|
||||||
|
else:
|
||||||
|
# found vars, accumulate in hostvars for clean inventory set
|
||||||
|
pref_k = 'vbox_' + k.strip().replace(' ', '_')
|
||||||
|
if k.startswith(' '):
|
||||||
|
if prevkey not in hostvars[current_host]:
|
||||||
|
hostvars[current_host][prevkey] = {}
|
||||||
|
hostvars[current_host][prevkey][pref_k] = v
|
||||||
|
else:
|
||||||
|
if v != '':
|
||||||
|
hostvars[current_host][pref_k] = v
|
||||||
|
|
||||||
|
prevkey = pref_k
|
||||||
|
|
||||||
|
# set vars in inventory from hostvars
|
||||||
|
for host in hostvars:
|
||||||
|
|
||||||
|
# create composite vars
|
||||||
|
if data.get('compose') and isinstance(data['compose'], dict):
|
||||||
|
for varname in data['compose']:
|
||||||
|
hostvars[host][varname] = self._compose(data['compose'][varname], hostvars[host])
|
||||||
|
|
||||||
|
# actually update inventory
|
||||||
|
for key in hostvars[host]:
|
||||||
|
self.inventory.set_variable(host, key, hostvars[host][key])
|
@ -0,0 +1,181 @@
|
|||||||
|
# Copyright 2017 RedHat, inc
|
||||||
|
#
|
||||||
|
# This file is part of Ansible
|
||||||
|
#
|
||||||
|
# Ansible is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# Ansible is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
#############################################
|
||||||
|
'''
|
||||||
|
DOCUMENTATION:
|
||||||
|
inventory: yaml
|
||||||
|
version_added: "2.4"
|
||||||
|
short_description: Uses a specifically YAML file as inventory source.
|
||||||
|
description:
|
||||||
|
- YAML based inventory, starts with the 'all' group and has hosts/vars/children entries.
|
||||||
|
- Host entries can have sub-entries defined, which will be treated as variables.
|
||||||
|
- Vars entries are normal group vars.
|
||||||
|
- Children are 'child groups', which can also have their own vars/hosts/children and so on.
|
||||||
|
- File MUST have a valid extension: yaml, yml, json.
|
||||||
|
notes:
|
||||||
|
- It takes the place of the previously hardcoded YAML inventory.
|
||||||
|
- To function it requires being whitelisted in configuration.
|
||||||
|
options:
|
||||||
|
_yaml_extensions:
|
||||||
|
description: list of 'valid' extensions for files containing YAML
|
||||||
|
EXAMPLES:
|
||||||
|
all: # keys must be unique, i.e. only one 'hosts' per group
|
||||||
|
hosts:
|
||||||
|
test1:
|
||||||
|
test2:
|
||||||
|
var1: value1
|
||||||
|
vars:
|
||||||
|
group_var1: value2
|
||||||
|
children: # key order does not matter, indentation does
|
||||||
|
other_group:
|
||||||
|
children:
|
||||||
|
group_x:
|
||||||
|
hosts:
|
||||||
|
test5
|
||||||
|
vars:
|
||||||
|
g2_var2: value3
|
||||||
|
hosts:
|
||||||
|
test4:
|
||||||
|
ansible_host: 127.0.0.1
|
||||||
|
last_group:
|
||||||
|
hosts:
|
||||||
|
test1 # same host as above, additional group membership
|
||||||
|
vars:
|
||||||
|
last_var: MYVALUE
|
||||||
|
'''
|
||||||
|
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import re
|
||||||
|
import os
|
||||||
|
|
||||||
|
from ansible import constants as C
|
||||||
|
from ansible.errors import AnsibleParserError
|
||||||
|
from ansible.module_utils.six import string_types
|
||||||
|
from ansible.module_utils._text import to_bytes, to_text
|
||||||
|
from ansible.parsing.utils.addresses import parse_address
|
||||||
|
from ansible.plugins.inventory import BaseFileInventoryPlugin, detect_range, expand_hostname_range
|
||||||
|
|
||||||
|
class InventoryModule(BaseFileInventoryPlugin):
|
||||||
|
|
||||||
|
NAME = 'yaml'
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
|
||||||
|
super(InventoryModule, self).__init__()
|
||||||
|
self.patterns = {}
|
||||||
|
|
||||||
|
self._compile_patterns()
|
||||||
|
|
||||||
|
def verify_file(self, path):
|
||||||
|
|
||||||
|
valid = False
|
||||||
|
b_path = to_bytes(path)
|
||||||
|
if super(InventoryModule, self).verify_file(b_path):
|
||||||
|
file_name, ext = os.path.splitext(b_path)
|
||||||
|
if ext and ext in C.YAML_FILENAME_EXTENSIONS:
|
||||||
|
valid = True
|
||||||
|
return valid
|
||||||
|
|
||||||
|
def parse(self, inventory, loader, path, cache=True):
|
||||||
|
''' parses the inventory file '''
|
||||||
|
|
||||||
|
super(InventoryModule, self).parse(inventory, loader, path)
|
||||||
|
|
||||||
|
try:
|
||||||
|
data = self.loader.load_from_file(path)
|
||||||
|
except Exception as e:
|
||||||
|
raise AnsibleParserError(e)
|
||||||
|
|
||||||
|
if not data:
|
||||||
|
return False
|
||||||
|
|
||||||
|
# We expect top level keys to correspond to groups, iterate over them
|
||||||
|
# to get host, vars and subgroups (which we iterate over recursivelly)
|
||||||
|
if isinstance(data, dict):
|
||||||
|
for group_name in data:
|
||||||
|
self._parse_group(group_name, data[group_name])
|
||||||
|
else:
|
||||||
|
raise AnsibleParserError("Invalid data from file, expected dictionary and got:\n\n%s" % data)
|
||||||
|
|
||||||
|
def _parse_group(self, group, group_data):
|
||||||
|
|
||||||
|
if self.patterns['groupname'].match(group):
|
||||||
|
|
||||||
|
self.inventory.add_group(group)
|
||||||
|
|
||||||
|
if isinstance(group_data, dict):
|
||||||
|
#make sure they are dicts
|
||||||
|
for section in ['vars', 'children', 'hosts']:
|
||||||
|
if section in group_data and isinstance(group_data[section], string_types):
|
||||||
|
group_data[section] = {group_data[section]: None}
|
||||||
|
|
||||||
|
if 'vars' in group_data:
|
||||||
|
for var in group_data['vars']:
|
||||||
|
self.inventory.set_variable(group, var, group_data['vars'][var])
|
||||||
|
|
||||||
|
if 'children' in group_data:
|
||||||
|
for subgroup in group_data['children']:
|
||||||
|
self._parse_group(subgroup, group_data['children'][subgroup])
|
||||||
|
self.inventory.add_child(group, subgroup)
|
||||||
|
|
||||||
|
if 'hosts' in group_data:
|
||||||
|
for host_pattern in group_data['hosts']:
|
||||||
|
hosts, port = self._parse_host(host_pattern)
|
||||||
|
self.populate_host_vars(hosts, group_data['hosts'][host_pattern], group, port)
|
||||||
|
else:
|
||||||
|
self.display.warning("Skipping '%s' as this is not a valid group name" % group)
|
||||||
|
|
||||||
|
def _parse_host(self, host_pattern):
|
||||||
|
'''
|
||||||
|
Each host key can be a pattern, try to process it and add variables as needed
|
||||||
|
'''
|
||||||
|
(hostnames, port) = self._expand_hostpattern(host_pattern)
|
||||||
|
|
||||||
|
return hostnames, port
|
||||||
|
|
||||||
|
def _expand_hostpattern(self, hostpattern):
|
||||||
|
'''
|
||||||
|
Takes a single host pattern and returns a list of hostnames and an
|
||||||
|
optional port number that applies to all of them.
|
||||||
|
'''
|
||||||
|
# Can the given hostpattern be parsed as a host with an optional port
|
||||||
|
# specification?
|
||||||
|
|
||||||
|
try:
|
||||||
|
(pattern, port) = parse_address(hostpattern, allow_ranges=True)
|
||||||
|
except:
|
||||||
|
# not a recognizable host pattern
|
||||||
|
pattern = hostpattern
|
||||||
|
port = None
|
||||||
|
|
||||||
|
# Once we have separated the pattern, we expand it into list of one or
|
||||||
|
# more hostnames, depending on whether it contains any [x:y] ranges.
|
||||||
|
|
||||||
|
if detect_range(pattern):
|
||||||
|
hostnames = expand_hostname_range(pattern)
|
||||||
|
else:
|
||||||
|
hostnames = [pattern]
|
||||||
|
|
||||||
|
return (hostnames, port)
|
||||||
|
|
||||||
|
def _compile_patterns(self):
|
||||||
|
'''
|
||||||
|
Compiles the regular expressions required to parse the inventory and stores them in self.patterns.
|
||||||
|
'''
|
||||||
|
self.patterns['groupname'] = re.compile( r'''^[A-Za-z_][A-Za-z0-9_]*$''')
|
@ -0,0 +1,107 @@
|
|||||||
|
# Copyright 2017 RedHat, inc
|
||||||
|
#
|
||||||
|
# This file is part of Ansible
|
||||||
|
#
|
||||||
|
# Ansible is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# Ansible is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
#############################################
|
||||||
|
'''
|
||||||
|
DOCUMENTATION:
|
||||||
|
vars: host_group_vars
|
||||||
|
version_added: "2.4"
|
||||||
|
short_description: In charge of loading group_vars and host_vars
|
||||||
|
description:
|
||||||
|
- Loads YAML vars into corresponding groups/hosts in group_vars/ and host_vars/ directories.
|
||||||
|
- Files are restricted by extension to one of .yaml, .json, .yml or no extension.
|
||||||
|
- Only applies to inventory sources that are existing paths.
|
||||||
|
notes:
|
||||||
|
- It takes the place of the previously hardcoded group_vars/host_vars loading.
|
||||||
|
'''
|
||||||
|
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import os
|
||||||
|
from ansible import constants as C
|
||||||
|
from ansible.errors import AnsibleParserError
|
||||||
|
from ansible.module_utils._text import to_bytes, to_text
|
||||||
|
from ansible.plugins.vars import BaseVarsPlugin
|
||||||
|
from ansible.utils.path import basedir
|
||||||
|
from ansible.inventory.host import Host
|
||||||
|
from ansible.inventory.group import Group
|
||||||
|
from ansible.utils.vars import combine_vars
|
||||||
|
|
||||||
|
|
||||||
|
class VarsModule(BaseVarsPlugin):
|
||||||
|
|
||||||
|
def get_vars(self, loader, path, entities):
|
||||||
|
''' parses the inventory file '''
|
||||||
|
|
||||||
|
if not isinstance(entities, list):
|
||||||
|
entities = [entities]
|
||||||
|
|
||||||
|
super(VarsModule, self).get_vars(loader, path, entities)
|
||||||
|
|
||||||
|
data = {}
|
||||||
|
for entity in entities:
|
||||||
|
if isinstance(entity, Host):
|
||||||
|
subdir = 'host_vars'
|
||||||
|
elif isinstance(entity, Group):
|
||||||
|
subdir = 'group_vars'
|
||||||
|
else:
|
||||||
|
raise AnsibleParserError("Supplied entity must be Host or Group, got %s instead" % (type(entity)))
|
||||||
|
|
||||||
|
try:
|
||||||
|
# load vars
|
||||||
|
opath = os.path.realpath(os.path.join(self._basedir, subdir))
|
||||||
|
b_opath = to_bytes(opath)
|
||||||
|
# no need to do much if path does not exist for basedir
|
||||||
|
if os.path.exists(b_opath):
|
||||||
|
if os.path.isdir(b_opath):
|
||||||
|
self._display.debug("\tprocessing dir %s" % opath)
|
||||||
|
for found in self._find_vars_files(opath, entity.name):
|
||||||
|
self._display.debug("READING %s" % found)
|
||||||
|
data = combine_vars(data, loader.load_from_file(found, cache=True, unsafe=True))
|
||||||
|
else:
|
||||||
|
self._display.warning("Found %s that is not a directory, skipping: %s" % (subdir, opath))
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
raise AnsibleParserError(to_text(e))
|
||||||
|
return data
|
||||||
|
|
||||||
|
def _find_vars_files(self, path, name):
|
||||||
|
""" Find {group,host}_vars files """
|
||||||
|
|
||||||
|
b_path = to_bytes(os.path.join(path, name))
|
||||||
|
found = []
|
||||||
|
for ext in C.YAML_FILENAME_EXTENSIONS + ['']:
|
||||||
|
|
||||||
|
if '.' in ext:
|
||||||
|
full_path = b_path + to_bytes(ext)
|
||||||
|
elif ext:
|
||||||
|
full_path = b'.'.join([b_path, to_bytes(ext)])
|
||||||
|
else:
|
||||||
|
full_path = b_path
|
||||||
|
|
||||||
|
if os.path.exists(full_path):
|
||||||
|
self._display.debug("\tfound %s" % to_text(full_path))
|
||||||
|
if os.path.isdir(full_path):
|
||||||
|
# matched dir name, so use all files included recursively
|
||||||
|
for spath in os.listdir(full_path):
|
||||||
|
if os.path.isdir(spath):
|
||||||
|
found.extend(self._find_vars_files(spath, name))
|
||||||
|
else:
|
||||||
|
found.append(spath)
|
||||||
|
else:
|
||||||
|
found.append(full_path)
|
||||||
|
return found
|
@ -1,715 +0,0 @@
|
|||||||
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
|
||||||
#
|
|
||||||
# This file is part of Ansible
|
|
||||||
#
|
|
||||||
# Ansible is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
#
|
|
||||||
# Ansible is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
# Make coding more python3-ish
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from collections import defaultdict, MutableMapping
|
|
||||||
|
|
||||||
try:
|
|
||||||
from hashlib import sha1
|
|
||||||
except ImportError:
|
|
||||||
from sha import sha as sha1
|
|
||||||
|
|
||||||
from jinja2.exceptions import UndefinedError
|
|
||||||
|
|
||||||
from ansible import constants as C
|
|
||||||
from ansible.cli import CLI
|
|
||||||
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleFileNotFound
|
|
||||||
from ansible.inventory.host import Host
|
|
||||||
from ansible.module_utils.six import iteritems, string_types, text_type
|
|
||||||
from ansible.plugins import lookup_loader
|
|
||||||
from ansible.plugins.cache import FactCache
|
|
||||||
from ansible.template import Templar
|
|
||||||
from ansible.utils.listify import listify_lookup_plugin_terms
|
|
||||||
from ansible.utils.vars import combine_vars
|
|
||||||
from ansible.utils.unsafe_proxy import wrap_var
|
|
||||||
from ansible.module_utils._text import to_native
|
|
||||||
|
|
||||||
try:
|
|
||||||
from __main__ import display
|
|
||||||
except ImportError:
|
|
||||||
from ansible.utils.display import Display
|
|
||||||
display = Display()
|
|
||||||
|
|
||||||
|
|
||||||
VARIABLE_CACHE = {}
|
|
||||||
HOSTVARS_CACHE = {}
|
|
||||||
|
|
||||||
|
|
||||||
class AnsibleInventoryVarsData(dict):
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
super(AnsibleInventoryVarsData, self).__init__(*args, **kwargs)
|
|
||||||
self.path = None
|
|
||||||
|
|
||||||
|
|
||||||
def preprocess_vars(a):
|
|
||||||
'''
|
|
||||||
Ensures that vars contained in the parameter passed in are
|
|
||||||
returned as a list of dictionaries, to ensure for instance
|
|
||||||
that vars loaded from a file conform to an expected state.
|
|
||||||
'''
|
|
||||||
|
|
||||||
if a is None:
|
|
||||||
return None
|
|
||||||
elif not isinstance(a, list):
|
|
||||||
data = [a]
|
|
||||||
else:
|
|
||||||
data = a
|
|
||||||
|
|
||||||
for item in data:
|
|
||||||
if not isinstance(item, MutableMapping):
|
|
||||||
raise AnsibleError("variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)" % (a, type(a)))
|
|
||||||
|
|
||||||
return data
|
|
||||||
|
|
||||||
|
|
||||||
def strip_internal_keys(dirty):
|
|
||||||
'''
|
|
||||||
All keys stating with _ansible_ are internal, so create a copy of the 'dirty' dict
|
|
||||||
and remove them from the clean one before returning it
|
|
||||||
'''
|
|
||||||
clean = dirty.copy()
|
|
||||||
for k in dirty.keys():
|
|
||||||
if isinstance(k, string_types) and k.startswith('_ansible_'):
|
|
||||||
del clean[k]
|
|
||||||
elif isinstance(dirty[k], dict):
|
|
||||||
clean[k] = strip_internal_keys(dirty[k])
|
|
||||||
return clean
|
|
||||||
|
|
||||||
|
|
||||||
class VariableManager:
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
|
|
||||||
self._nonpersistent_fact_cache = defaultdict(dict)
|
|
||||||
self._vars_cache = defaultdict(dict)
|
|
||||||
self._extra_vars = defaultdict(dict)
|
|
||||||
self._host_vars_files = defaultdict(dict)
|
|
||||||
self._group_vars_files = defaultdict(dict)
|
|
||||||
self._inventory = None
|
|
||||||
self._hostvars = None
|
|
||||||
self._omit_token = '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest()
|
|
||||||
self._options_vars = defaultdict(dict)
|
|
||||||
|
|
||||||
# bad cache plugin is not fatal error
|
|
||||||
try:
|
|
||||||
self._fact_cache = FactCache()
|
|
||||||
except AnsibleError as e:
|
|
||||||
display.warning(to_native(e))
|
|
||||||
# fallback to a dict as in memory cache
|
|
||||||
self._fact_cache = {}
|
|
||||||
|
|
||||||
def __getstate__(self):
|
|
||||||
data = dict(
|
|
||||||
fact_cache=self._fact_cache,
|
|
||||||
np_fact_cache=self._nonpersistent_fact_cache,
|
|
||||||
vars_cache=self._vars_cache,
|
|
||||||
extra_vars=self._extra_vars,
|
|
||||||
host_vars_files=self._host_vars_files,
|
|
||||||
group_vars_files=self._group_vars_files,
|
|
||||||
omit_token=self._omit_token,
|
|
||||||
options_vars=self._options_vars,
|
|
||||||
# inventory=self._inventory,
|
|
||||||
)
|
|
||||||
return data
|
|
||||||
|
|
||||||
def __setstate__(self, data):
|
|
||||||
self._fact_cache = data.get('fact_cache', defaultdict(dict))
|
|
||||||
self._nonpersistent_fact_cache = data.get('np_fact_cache', defaultdict(dict))
|
|
||||||
self._vars_cache = data.get('vars_cache', defaultdict(dict))
|
|
||||||
self._extra_vars = data.get('extra_vars', dict())
|
|
||||||
self._host_vars_files = data.get('host_vars_files', defaultdict(dict))
|
|
||||||
self._group_vars_files = data.get('group_vars_files', defaultdict(dict))
|
|
||||||
self._omit_token = data.get('omit_token', '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest())
|
|
||||||
self._inventory = data.get('inventory', None)
|
|
||||||
self._options_vars = data.get('options_vars', dict())
|
|
||||||
|
|
||||||
def _get_cache_entry(self, play=None, host=None, task=None):
|
|
||||||
play_id = "NONE"
|
|
||||||
if play:
|
|
||||||
play_id = play._uuid
|
|
||||||
|
|
||||||
host_id = "NONE"
|
|
||||||
if host:
|
|
||||||
host_id = host.get_name()
|
|
||||||
|
|
||||||
task_id = "NONE"
|
|
||||||
if task:
|
|
||||||
task_id = task._uuid
|
|
||||||
|
|
||||||
return "PLAY:%s;HOST:%s;TASK:%s" % (play_id, host_id, task_id)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def extra_vars(self):
|
|
||||||
''' ensures a clean copy of the extra_vars are made '''
|
|
||||||
return self._extra_vars.copy()
|
|
||||||
|
|
||||||
@extra_vars.setter
|
|
||||||
def extra_vars(self, value):
|
|
||||||
''' ensures a clean copy of the extra_vars are used to set the value '''
|
|
||||||
assert isinstance(value, MutableMapping)
|
|
||||||
self._extra_vars = value.copy()
|
|
||||||
|
|
||||||
def set_inventory(self, inventory):
|
|
||||||
self._inventory = inventory
|
|
||||||
|
|
||||||
@property
|
|
||||||
def options_vars(self):
|
|
||||||
''' ensures a clean copy of the options_vars are made '''
|
|
||||||
return self._options_vars.copy()
|
|
||||||
|
|
||||||
@options_vars.setter
|
|
||||||
def options_vars(self, value):
|
|
||||||
''' ensures a clean copy of the options_vars are used to set the value '''
|
|
||||||
assert isinstance(value, dict)
|
|
||||||
self._options_vars = value.copy()
|
|
||||||
|
|
||||||
def _preprocess_vars(self, a):
|
|
||||||
'''
|
|
||||||
Ensures that vars contained in the parameter passed in are
|
|
||||||
returned as a list of dictionaries, to ensure for instance
|
|
||||||
that vars loaded from a file conform to an expected state.
|
|
||||||
'''
|
|
||||||
|
|
||||||
if a is None:
|
|
||||||
return None
|
|
||||||
elif not isinstance(a, list):
|
|
||||||
data = [a]
|
|
||||||
else:
|
|
||||||
data = a
|
|
||||||
|
|
||||||
for item in data:
|
|
||||||
if not isinstance(item, MutableMapping):
|
|
||||||
raise AnsibleError("variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)" % (a, type(a)))
|
|
||||||
|
|
||||||
return data
|
|
||||||
|
|
||||||
def get_vars(self, loader, play=None, host=None, task=None, include_hostvars=True, include_delegate_to=True, use_cache=True):
|
|
||||||
'''
|
|
||||||
Returns the variables, with optional "context" given via the parameters
|
|
||||||
for the play, host, and task (which could possibly result in different
|
|
||||||
sets of variables being returned due to the additional context).
|
|
||||||
|
|
||||||
The order of precedence is:
|
|
||||||
- play->roles->get_default_vars (if there is a play context)
|
|
||||||
- group_vars_files[host] (if there is a host context)
|
|
||||||
- host_vars_files[host] (if there is a host context)
|
|
||||||
- host->get_vars (if there is a host context)
|
|
||||||
- fact_cache[host] (if there is a host context)
|
|
||||||
- play vars (if there is a play context)
|
|
||||||
- play vars_files (if there's no host context, ignore
|
|
||||||
file names that cannot be templated)
|
|
||||||
- task->get_vars (if there is a task context)
|
|
||||||
- vars_cache[host] (if there is a host context)
|
|
||||||
- extra vars
|
|
||||||
'''
|
|
||||||
|
|
||||||
display.debug("in VariableManager get_vars()")
|
|
||||||
cache_entry = self._get_cache_entry(play=play, host=host, task=task)
|
|
||||||
if cache_entry in VARIABLE_CACHE and use_cache:
|
|
||||||
display.debug("vars are cached, returning them now")
|
|
||||||
return VARIABLE_CACHE[cache_entry]
|
|
||||||
|
|
||||||
all_vars = dict()
|
|
||||||
magic_variables = self._get_magic_variables(
|
|
||||||
loader=loader,
|
|
||||||
play=play,
|
|
||||||
host=host,
|
|
||||||
task=task,
|
|
||||||
include_hostvars=include_hostvars,
|
|
||||||
include_delegate_to=include_delegate_to,
|
|
||||||
)
|
|
||||||
|
|
||||||
if play:
|
|
||||||
# first we compile any vars specified in defaults/main.yml
|
|
||||||
# for all roles within the specified play
|
|
||||||
for role in play.get_roles():
|
|
||||||
all_vars = combine_vars(all_vars, role.get_default_vars())
|
|
||||||
|
|
||||||
# if we have a task in this context, and that task has a role, make
|
|
||||||
# sure it sees its defaults above any other roles, as we previously
|
|
||||||
# (v1) made sure each task had a copy of its roles default vars
|
|
||||||
if task and task._role is not None and (play or task.action == 'include_role'):
|
|
||||||
all_vars = combine_vars(all_vars, task._role.get_default_vars(dep_chain=task.get_dep_chain()))
|
|
||||||
|
|
||||||
if host:
|
|
||||||
# first we merge in vars from groups specified in the inventory (INI or script)
|
|
||||||
all_vars = combine_vars(all_vars, host.get_group_vars())
|
|
||||||
|
|
||||||
# these are PLAY host/group vars, inventory adjacent ones have already been processed
|
|
||||||
# next, we load any vars from group_vars files and then any vars from host_vars
|
|
||||||
# files which may apply to this host or the groups it belongs to. We merge in the
|
|
||||||
# special 'all' group_vars first, if they exist
|
|
||||||
if 'all' in self._group_vars_files:
|
|
||||||
data = preprocess_vars(self._group_vars_files['all'])
|
|
||||||
for item in data:
|
|
||||||
all_vars = combine_vars(all_vars, item)
|
|
||||||
|
|
||||||
for group in sorted(host.get_groups(), key=lambda g: (g.depth, g.priority, g.name)):
|
|
||||||
if group.name in self._group_vars_files and group.name != 'all':
|
|
||||||
for data in self._group_vars_files[group.name]:
|
|
||||||
data = preprocess_vars(data)
|
|
||||||
for item in data:
|
|
||||||
all_vars = combine_vars(all_vars, item)
|
|
||||||
|
|
||||||
# then we merge in vars from the host specified in the inventory (INI or script)
|
|
||||||
all_vars = combine_vars(all_vars, host.get_vars())
|
|
||||||
|
|
||||||
# then we merge in the host_vars/<hostname> file, if it exists
|
|
||||||
host_name = host.get_name()
|
|
||||||
if host_name in self._host_vars_files:
|
|
||||||
for data in self._host_vars_files[host_name]:
|
|
||||||
data = preprocess_vars(data)
|
|
||||||
for item in data:
|
|
||||||
all_vars = combine_vars(all_vars, item)
|
|
||||||
|
|
||||||
# finally, the facts caches for this host, if it exists
|
|
||||||
try:
|
|
||||||
host_facts = wrap_var(self._fact_cache.get(host.name, dict()))
|
|
||||||
if not C.NAMESPACE_FACTS:
|
|
||||||
# allow facts to polute main namespace
|
|
||||||
all_vars = combine_vars(all_vars, host_facts)
|
|
||||||
# always return namespaced facts
|
|
||||||
all_vars = combine_vars(all_vars, {'ansible_facts': host_facts})
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if play:
|
|
||||||
all_vars = combine_vars(all_vars, play.get_vars())
|
|
||||||
|
|
||||||
for vars_file_item in play.get_vars_files():
|
|
||||||
# create a set of temporary vars here, which incorporate the extra
|
|
||||||
# and magic vars so we can properly template the vars_files entries
|
|
||||||
temp_vars = combine_vars(all_vars, self._extra_vars)
|
|
||||||
temp_vars = combine_vars(temp_vars, magic_variables)
|
|
||||||
templar = Templar(loader=loader, variables=temp_vars)
|
|
||||||
|
|
||||||
# we assume each item in the list is itself a list, as we
|
|
||||||
# support "conditional includes" for vars_files, which mimics
|
|
||||||
# the with_first_found mechanism.
|
|
||||||
vars_file_list = vars_file_item
|
|
||||||
if not isinstance(vars_file_list, list):
|
|
||||||
vars_file_list = [vars_file_list]
|
|
||||||
|
|
||||||
# now we iterate through the (potential) files, and break out
|
|
||||||
# as soon as we read one from the list. If none are found, we
|
|
||||||
# raise an error, which is silently ignored at this point.
|
|
||||||
try:
|
|
||||||
for vars_file in vars_file_list:
|
|
||||||
vars_file = templar.template(vars_file)
|
|
||||||
try:
|
|
||||||
data = preprocess_vars(loader.load_from_file(vars_file))
|
|
||||||
if data is not None:
|
|
||||||
for item in data:
|
|
||||||
all_vars = combine_vars(all_vars, item)
|
|
||||||
break
|
|
||||||
except AnsibleFileNotFound:
|
|
||||||
# we continue on loader failures
|
|
||||||
continue
|
|
||||||
except AnsibleParserError:
|
|
||||||
raise
|
|
||||||
else:
|
|
||||||
# if include_delegate_to is set to False, we ignore the missing
|
|
||||||
# vars file here because we're working on a delegated host
|
|
||||||
if include_delegate_to:
|
|
||||||
raise AnsibleFileNotFound("vars file %s was not found" % vars_file_item)
|
|
||||||
except (UndefinedError, AnsibleUndefinedVariable):
|
|
||||||
if host is not None and self._fact_cache.get(host.name, dict()).get('module_setup') and task is not None:
|
|
||||||
raise AnsibleUndefinedVariable("an undefined variable was found when attempting to template the vars_files item '%s'" % vars_file_item,
|
|
||||||
obj=vars_file_item)
|
|
||||||
else:
|
|
||||||
# we do not have a full context here, and the missing variable could be
|
|
||||||
# because of that, so just show a warning and continue
|
|
||||||
display.vvv("skipping vars_file '%s' due to an undefined variable" % vars_file_item)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# By default, we now merge in all vars from all roles in the play,
|
|
||||||
# unless the user has disabled this via a config option
|
|
||||||
if not C.DEFAULT_PRIVATE_ROLE_VARS:
|
|
||||||
for role in play.get_roles():
|
|
||||||
all_vars = combine_vars(all_vars, role.get_vars(include_params=False))
|
|
||||||
|
|
||||||
# next, we merge in the vars from the role, which will specifically
|
|
||||||
# follow the role dependency chain, and then we merge in the tasks
|
|
||||||
# vars (which will look at parent blocks/task includes)
|
|
||||||
if task:
|
|
||||||
if task._role:
|
|
||||||
all_vars = combine_vars(all_vars, task._role.get_vars(task.get_dep_chain(), include_params=False))
|
|
||||||
all_vars = combine_vars(all_vars, task.get_vars())
|
|
||||||
|
|
||||||
# next, we merge in the vars cache (include vars) and nonpersistent
|
|
||||||
# facts cache (set_fact/register), in that order
|
|
||||||
if host:
|
|
||||||
all_vars = combine_vars(all_vars, self._vars_cache.get(host.get_name(), dict()))
|
|
||||||
all_vars = combine_vars(all_vars, self._nonpersistent_fact_cache.get(host.name, dict()))
|
|
||||||
|
|
||||||
# next, we merge in role params and task include params
|
|
||||||
if task:
|
|
||||||
if task._role:
|
|
||||||
all_vars = combine_vars(all_vars, task._role.get_role_params(task.get_dep_chain()))
|
|
||||||
|
|
||||||
# special case for include tasks, where the include params
|
|
||||||
# may be specified in the vars field for the task, which should
|
|
||||||
# have higher precedence than the vars/np facts above
|
|
||||||
all_vars = combine_vars(all_vars, task.get_include_params())
|
|
||||||
|
|
||||||
# finally, we merge in extra vars and the magic variables
|
|
||||||
all_vars = combine_vars(all_vars, self._extra_vars)
|
|
||||||
all_vars = combine_vars(all_vars, magic_variables)
|
|
||||||
|
|
||||||
# special case for the 'environment' magic variable, as someone
|
|
||||||
# may have set it as a variable and we don't want to stomp on it
|
|
||||||
if task:
|
|
||||||
if 'environment' not in all_vars:
|
|
||||||
all_vars['environment'] = task.environment
|
|
||||||
else:
|
|
||||||
display.warning("The variable 'environment' appears to be used already, which is also used internally for environment variables set on the "
|
|
||||||
"task/block/play. You should use a different variable name to avoid conflicts with this internal variable")
|
|
||||||
|
|
||||||
# if we have a task and we're delegating to another host, figure out the
|
|
||||||
# variables for that host now so we don't have to rely on hostvars later
|
|
||||||
if task and task.delegate_to is not None and include_delegate_to:
|
|
||||||
all_vars['ansible_delegated_vars'] = self._get_delegated_vars(loader, play, task, all_vars)
|
|
||||||
|
|
||||||
# VARIABLE_CACHE[cache_entry] = all_vars
|
|
||||||
if task or play:
|
|
||||||
all_vars['vars'] = all_vars.copy()
|
|
||||||
|
|
||||||
display.debug("done with get_vars()")
|
|
||||||
return all_vars
|
|
||||||
|
|
||||||
def invalidate_hostvars_cache(self, play):
|
|
||||||
hostvars_cache_entry = self._get_cache_entry(play=play)
|
|
||||||
if hostvars_cache_entry in HOSTVARS_CACHE:
|
|
||||||
del HOSTVARS_CACHE[hostvars_cache_entry]
|
|
||||||
|
|
||||||
def _get_magic_variables(self, loader, play, host, task, include_hostvars, include_delegate_to):
|
|
||||||
'''
|
|
||||||
Returns a dictionary of so-called "magic" variables in Ansible,
|
|
||||||
which are special variables we set internally for use.
|
|
||||||
'''
|
|
||||||
|
|
||||||
variables = dict()
|
|
||||||
variables['playbook_dir'] = loader.get_basedir()
|
|
||||||
variables['ansible_playbook_python'] = sys.executable
|
|
||||||
|
|
||||||
if host:
|
|
||||||
# host already provides some magic vars via host.get_vars()
|
|
||||||
if self._inventory:
|
|
||||||
variables['groups'] = self._inventory.get_group_dict()
|
|
||||||
|
|
||||||
if play:
|
|
||||||
variables['role_names'] = [r._role_name for r in play.roles]
|
|
||||||
|
|
||||||
if task:
|
|
||||||
if task._role:
|
|
||||||
variables['role_name'] = task._role.get_name()
|
|
||||||
variables['role_path'] = task._role._role_path
|
|
||||||
variables['role_uuid'] = text_type(task._role._uuid)
|
|
||||||
|
|
||||||
if self._inventory is not None:
|
|
||||||
variables['inventory_dir'] = self._inventory.basedir()
|
|
||||||
variables['inventory_file'] = self._inventory.src()
|
|
||||||
if play:
|
|
||||||
templar = Templar(loader=loader)
|
|
||||||
if templar.is_template(play.hosts):
|
|
||||||
pattern = 'all'
|
|
||||||
else:
|
|
||||||
pattern = play.hosts or 'all'
|
|
||||||
# add the list of hosts in the play, as adjusted for limit/filters
|
|
||||||
variables['ansible_play_hosts_all'] = [x.name for x in self._inventory.get_hosts(pattern=pattern, ignore_restrictions=True)]
|
|
||||||
variables['ansible_play_hosts'] = [x for x in variables['ansible_play_hosts_all'] if x not in play._removed_hosts]
|
|
||||||
variables['ansible_play_batch'] = [x.name for x in self._inventory.get_hosts() if x.name not in play._removed_hosts]
|
|
||||||
|
|
||||||
# DEPRECATED: play_hosts should be deprecated in favor of ansible_play_batch,
|
|
||||||
# however this would take work in the templating engine, so for now we'll add both
|
|
||||||
variables['play_hosts'] = variables['ansible_play_batch']
|
|
||||||
|
|
||||||
# the 'omit' value alows params to be left out if the variable they are based on is undefined
|
|
||||||
variables['omit'] = self._omit_token
|
|
||||||
variables['ansible_version'] = CLI.version_info(gitinfo=False)
|
|
||||||
# Set options vars
|
|
||||||
for option, option_value in iteritems(self._options_vars):
|
|
||||||
variables[option] = option_value
|
|
||||||
|
|
||||||
if self._hostvars is not None and include_hostvars:
|
|
||||||
variables['hostvars'] = self._hostvars
|
|
||||||
|
|
||||||
return variables
|
|
||||||
|
|
||||||
def _get_delegated_vars(self, loader, play, task, existing_variables):
|
|
||||||
# we unfortunately need to template the delegate_to field here,
|
|
||||||
# as we're fetching vars before post_validate has been called on
|
|
||||||
# the task that has been passed in
|
|
||||||
vars_copy = existing_variables.copy()
|
|
||||||
templar = Templar(loader=loader, variables=vars_copy)
|
|
||||||
|
|
||||||
items = []
|
|
||||||
if task.loop is not None:
|
|
||||||
if task.loop in lookup_loader:
|
|
||||||
try:
|
|
||||||
loop_terms = listify_lookup_plugin_terms(terms=task.loop_args, templar=templar, loader=loader, fail_on_undefined=True, convert_bare=False)
|
|
||||||
items = lookup_loader.get(task.loop, loader=loader, templar=templar).run(terms=loop_terms, variables=vars_copy)
|
|
||||||
except AnsibleUndefinedVariable:
|
|
||||||
# This task will be skipped later due to this, so we just setup
|
|
||||||
# a dummy array for the later code so it doesn't fail
|
|
||||||
items = [None]
|
|
||||||
else:
|
|
||||||
raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % task.loop)
|
|
||||||
else:
|
|
||||||
items = [None]
|
|
||||||
|
|
||||||
delegated_host_vars = dict()
|
|
||||||
for item in items:
|
|
||||||
# update the variables with the item value for templating, in case we need it
|
|
||||||
if item is not None:
|
|
||||||
vars_copy['item'] = item
|
|
||||||
|
|
||||||
templar.set_available_variables(vars_copy)
|
|
||||||
delegated_host_name = templar.template(task.delegate_to, fail_on_undefined=False)
|
|
||||||
if delegated_host_name is None:
|
|
||||||
raise AnsibleError(message="Undefined delegate_to host for task:", obj=task._ds)
|
|
||||||
if delegated_host_name in delegated_host_vars:
|
|
||||||
# no need to repeat ourselves, as the delegate_to value
|
|
||||||
# does not appear to be tied to the loop item variable
|
|
||||||
continue
|
|
||||||
|
|
||||||
# a dictionary of variables to use if we have to create a new host below
|
|
||||||
# we set the default port based on the default transport here, to make sure
|
|
||||||
# we use the proper default for windows
|
|
||||||
new_port = C.DEFAULT_REMOTE_PORT
|
|
||||||
if C.DEFAULT_TRANSPORT == 'winrm':
|
|
||||||
new_port = 5986
|
|
||||||
|
|
||||||
new_delegated_host_vars = dict(
|
|
||||||
ansible_host=delegated_host_name,
|
|
||||||
ansible_port=new_port,
|
|
||||||
ansible_user=C.DEFAULT_REMOTE_USER,
|
|
||||||
ansible_connection=C.DEFAULT_TRANSPORT,
|
|
||||||
)
|
|
||||||
|
|
||||||
# now try to find the delegated-to host in inventory, or failing that,
|
|
||||||
# create a new host on the fly so we can fetch variables for it
|
|
||||||
delegated_host = None
|
|
||||||
if self._inventory is not None:
|
|
||||||
delegated_host = self._inventory.get_host(delegated_host_name)
|
|
||||||
# try looking it up based on the address field, and finally
|
|
||||||
# fall back to creating a host on the fly to use for the var lookup
|
|
||||||
if delegated_host is None:
|
|
||||||
if delegated_host_name in C.LOCALHOST:
|
|
||||||
delegated_host = self._inventory.localhost
|
|
||||||
else:
|
|
||||||
for h in self._inventory.get_hosts(ignore_limits=True, ignore_restrictions=True):
|
|
||||||
# check if the address matches, or if both the delegated_to host
|
|
||||||
# and the current host are in the list of localhost aliases
|
|
||||||
if h.address == delegated_host_name:
|
|
||||||
delegated_host = h
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
delegated_host = Host(name=delegated_host_name)
|
|
||||||
delegated_host.vars.update(new_delegated_host_vars)
|
|
||||||
else:
|
|
||||||
delegated_host = Host(name=delegated_host_name)
|
|
||||||
delegated_host.vars.update(new_delegated_host_vars)
|
|
||||||
|
|
||||||
# now we go fetch the vars for the delegated-to host and save them in our
|
|
||||||
# master dictionary of variables to be used later in the TaskExecutor/PlayContext
|
|
||||||
delegated_host_vars[delegated_host_name] = self.get_vars(
|
|
||||||
loader=loader,
|
|
||||||
play=play,
|
|
||||||
host=delegated_host,
|
|
||||||
task=task,
|
|
||||||
include_delegate_to=False,
|
|
||||||
include_hostvars=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
return delegated_host_vars
|
|
||||||
|
|
||||||
def _get_inventory_basename(self, path):
|
|
||||||
'''
|
|
||||||
Returns the basename minus the extension of the given path, so the
|
|
||||||
bare filename can be matched against host/group names later
|
|
||||||
'''
|
|
||||||
|
|
||||||
(name, ext) = os.path.splitext(os.path.basename(path))
|
|
||||||
if ext not in ('.yml', '.yaml'):
|
|
||||||
return os.path.basename(path)
|
|
||||||
else:
|
|
||||||
return name
|
|
||||||
|
|
||||||
def _load_inventory_file(self, path, loader, filter_ext=False):
|
|
||||||
'''
|
|
||||||
helper function, which loads the file and gets the
|
|
||||||
basename of the file without the extension
|
|
||||||
'''
|
|
||||||
|
|
||||||
if loader.is_directory(path):
|
|
||||||
data = dict()
|
|
||||||
|
|
||||||
try:
|
|
||||||
names = loader.list_directory(path)
|
|
||||||
except os.error as err:
|
|
||||||
raise AnsibleError("This folder cannot be listed: %s: %s." % (path, err.strerror))
|
|
||||||
|
|
||||||
# evaluate files in a stable order rather than whatever
|
|
||||||
# order the filesystem lists them.
|
|
||||||
names.sort()
|
|
||||||
|
|
||||||
# do not parse hidden files or dirs, e.g. .svn/
|
|
||||||
paths = [os.path.join(path, name) for name in names if not (name.startswith('.') or name.endswith('~'))]
|
|
||||||
for p in paths:
|
|
||||||
results = self._load_inventory_file(path=p, loader=loader, filter_ext=True)
|
|
||||||
if results is not None:
|
|
||||||
data = combine_vars(data, results)
|
|
||||||
|
|
||||||
else:
|
|
||||||
file_name, ext = os.path.splitext(path)
|
|
||||||
data = None
|
|
||||||
if not filter_ext or ext in C.YAML_FILENAME_EXTENSIONS:
|
|
||||||
if loader.path_exists(path):
|
|
||||||
data = loader.load_from_file(path)
|
|
||||||
else:
|
|
||||||
# try appending yaml extenstion to find valid files
|
|
||||||
# avoid empty extensions otherwise all files would be tried
|
|
||||||
for test_ext in (ext for ext in C.YAML_FILENAME_EXTENSIONS if ext):
|
|
||||||
new_path = path + test_ext
|
|
||||||
if loader.path_exists(new_path):
|
|
||||||
data = loader.load_from_file(new_path)
|
|
||||||
break
|
|
||||||
|
|
||||||
rval = AnsibleInventoryVarsData()
|
|
||||||
rval.path = path
|
|
||||||
|
|
||||||
if data is not None:
|
|
||||||
if not isinstance(data, dict):
|
|
||||||
raise AnsibleError("Problem parsing file '%s': line %d, column %d" % data.ansible_pos)
|
|
||||||
else:
|
|
||||||
rval.update(data)
|
|
||||||
|
|
||||||
return rval
|
|
||||||
|
|
||||||
    def add_host_vars_file(self, path, loader):
        '''
        Loads and caches a host_vars file in the _host_vars_files dict,
        where the key to that dictionary is the basename of the file, minus
        the extension, for matching against a given inventory host name
        '''

        name = self._get_inventory_basename(path)
        if name not in self._host_vars_files:
            self._host_vars_files[name] = []

        for entry in self._host_vars_files[name]:
            if entry.path == path:
                data = entry
                break
        else:
            data = self._load_inventory_file(path, loader)
            if data:
                self._host_vars_files[name].append(data)

        return data

    def add_group_vars_file(self, path, loader):
        '''
        Loads and caches a group_vars file in the _group_vars_files dict,
        where the key to that dictionary is the basename of the file, minus
        the extension, for matching against a given inventory group name
        '''

        name = self._get_inventory_basename(path)
        if name not in self._group_vars_files:
            self._group_vars_files[name] = []

        for entry in self._group_vars_files[name]:
            if entry.path == path:
                data = entry
                break
        else:
            data = self._load_inventory_file(path, loader)
            if data:
                self._group_vars_files[name].append(data)

        return data

    def clear_playbook_hostgroup_vars_files(self, path):
        for f in self._host_vars_files.keys():
            keepers = []
            for entry in self._host_vars_files[f]:
                if os.path.dirname(entry.path) != os.path.join(path, 'host_vars'):
                    keepers.append(entry)
            self._host_vars_files[f] = keepers
        for f in self._group_vars_files.keys():
            keepers = []
            for entry in self._group_vars_files[f]:
                if os.path.dirname(entry.path) != os.path.join(path, 'group_vars'):
                    keepers.append(entry)
            self._group_vars_files[f] = keepers

    def clear_facts(self, hostname):
        '''
        Clears the facts for a host
        '''
        if hostname in self._fact_cache:
            del self._fact_cache[hostname]

    def set_host_facts(self, host, facts):
        '''
        Sets or updates the given facts for a host in the fact cache.
        '''

        assert isinstance(facts, dict)

        if host.name not in self._fact_cache:
            self._fact_cache[host.name] = facts
        else:
            try:
                self._fact_cache.update(host.name, facts)
            except KeyError:
                self._fact_cache[host.name] = facts

    def set_nonpersistent_facts(self, host, facts):
        '''
        Sets or updates the given facts for a host in the fact cache.
        '''

        assert isinstance(facts, dict)

        if host.name not in self._nonpersistent_fact_cache:
            self._nonpersistent_fact_cache[host.name] = facts
        else:
            try:
                self._nonpersistent_fact_cache[host.name].update(facts)
            except KeyError:
                self._nonpersistent_fact_cache[host.name] = facts

    def set_host_variable(self, host, varname, value):
        '''
        Sets a value in the vars_cache for a host.
        '''
        host_name = host.get_name()
        if host_name not in self._vars_cache:
            self._vars_cache[host_name] = dict()
        if varname in self._vars_cache[host_name] and isinstance(self._vars_cache[host_name][varname], MutableMapping) and isinstance(value, MutableMapping):
            self._vars_cache[host_name] = combine_vars(self._vars_cache[host_name], {varname: value})
        else:
            self._vars_cache[host_name][varname] = value

@ -0,0 +1,598 @@
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import sys

from collections import defaultdict, MutableMapping

try:
    from hashlib import sha1
except ImportError:
    from sha import sha as sha1

from jinja2.exceptions import UndefinedError

from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleFileNotFound
from ansible.inventory.host import Host
from ansible.inventory.helpers import sort_groups, get_group_vars
from ansible.module_utils._text import to_native
from ansible.module_utils.six import iteritems, string_types, text_type
from ansible.plugins import lookup_loader, vars_loader
from ansible.plugins.cache import FactCache
from ansible.template import Templar
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.vars import combine_vars
from ansible.utils.unsafe_proxy import wrap_var

try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()


def preprocess_vars(a):
    '''
    Ensures that vars contained in the parameter passed in are
    returned as a list of dictionaries, to ensure for instance
    that vars loaded from a file conform to an expected state.
    '''

    if a is None:
        return None
    elif not isinstance(a, list):
        data = [ a ]
    else:
        data = a

    for item in data:
        if not isinstance(item, MutableMapping):
            raise AnsibleError("variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)" % (a, type(a)))

    return data


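# Editorial note (not part of the original commit): a short sketch of what
# preprocess_vars() (the function above) accepts and returns, based only on the
# code as written:
#
#   preprocess_vars(None)                 -> None
#   preprocess_vars({'a': 1})             -> [{'a': 1}]
#   preprocess_vars([{'a': 1}, {'b': 2}]) -> [{'a': 1}, {'b': 2}]
#   preprocess_vars(['not-a-dict'])       -> raises AnsibleError

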
def strip_internal_keys(dirty):
    '''
    All keys starting with _ansible_ are internal, so create a copy of the 'dirty' dict
    and remove them from the clean one before returning it
    '''
    clean = dirty.copy()
    for k in dirty.keys():
        if isinstance(k, string_types) and k.startswith('_ansible_'):
            del clean[k]
        elif isinstance(dirty[k], dict):
            clean[k] = strip_internal_keys(dirty[k])
    return clean


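# Editorial note (not part of the original commit): strip_internal_keys() above
# recurses into nested dictionaries, so inner _ansible_* keys are removed too:
#
#   strip_internal_keys({'_ansible_no_log': True, 'rc': 0, 'nested': {'_ansible_item': 1, 'x': 2}})
#   -> {'rc': 0, 'nested': {'x': 2}}

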
class VariableManager:

    def __init__(self, loader=None, inventory=None):

        self._nonpersistent_fact_cache = defaultdict(dict)
        self._vars_cache = defaultdict(dict)
        self._extra_vars = defaultdict(dict)
        self._host_vars_files = defaultdict(dict)
        self._group_vars_files = defaultdict(dict)
        self._inventory = inventory
        self._loader = loader
        self._hostvars = None
        self._omit_token = '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest()
        self._options_vars = defaultdict(dict)

        # a bad cache plugin is not a fatal error
        try:
            self._fact_cache = FactCache()
        except AnsibleError as e:
            display.warning(to_native(e))
            # fall back to a dict as an in-memory cache
            self._fact_cache = {}

    def __getstate__(self):
        data = dict(
            fact_cache = self._fact_cache,
            np_fact_cache = self._nonpersistent_fact_cache,
            vars_cache = self._vars_cache,
            extra_vars = self._extra_vars,
            host_vars_files = self._host_vars_files,
            group_vars_files = self._group_vars_files,
            omit_token = self._omit_token,
            options_vars = self._options_vars,
            inventory = self._inventory,
        )
        return data

    def __setstate__(self, data):
        self._fact_cache = data.get('fact_cache', defaultdict(dict))
        self._nonpersistent_fact_cache = data.get('np_fact_cache', defaultdict(dict))
        self._vars_cache = data.get('vars_cache', defaultdict(dict))
        self._extra_vars = data.get('extra_vars', dict())
        self._host_vars_files = data.get('host_vars_files', defaultdict(dict))
        self._group_vars_files = data.get('group_vars_files', defaultdict(dict))
        self._omit_token = data.get('omit_token', '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest())
        self._inventory = data.get('inventory', None)
        self._options_vars = data.get('options_vars', dict())

    @property
    def extra_vars(self):
        ''' ensures a clean copy of the extra_vars is returned '''
        return self._extra_vars.copy()

    @extra_vars.setter
    def extra_vars(self, value):
        ''' ensures a clean copy of the extra_vars is used to set the value '''
        assert isinstance(value, MutableMapping)
        self._extra_vars = value.copy()

    def set_inventory(self, inventory):
        self._inventory = inventory

    @property
    def options_vars(self):
        ''' ensures a clean copy of the options_vars is returned '''
        return self._options_vars.copy()

    @options_vars.setter
    def options_vars(self, value):
        ''' ensures a clean copy of the options_vars is used to set the value '''
        assert isinstance(value, dict)
        self._options_vars = value.copy()

    def _preprocess_vars(self, a):
        '''
        Ensures that vars contained in the parameter passed in are
        returned as a list of dictionaries, to ensure for instance
        that vars loaded from a file conform to an expected state.
        '''

        if a is None:
            return None
        elif not isinstance(a, list):
            data = [ a ]
        else:
            data = a

        for item in data:
            if not isinstance(item, MutableMapping):
                raise AnsibleError("variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)" % (a, type(a)))

        return data

    def get_vars(self, play=None, host=None, task=None, include_hostvars=True, include_delegate_to=True, use_cache=True):
        '''
        Returns the variables, with optional "context" given via the parameters
        for the play, host, and task (which could possibly result in different
        sets of variables being returned due to the additional context).

        The order of precedence is:
            - play->roles->get_default_vars (if there is a play context)
            - group_vars_files[host] (if there is a host context)
            - host_vars_files[host] (if there is a host context)
            - host->get_vars (if there is a host context)
            - fact_cache[host] (if there is a host context)
            - play vars (if there is a play context)
            - play vars_files (if there's no host context, ignore
              file names that cannot be templated)
            - task->get_vars (if there is a task context)
            - vars_cache[host] (if there is a host context)
            - extra vars
        '''

        display.debug("in VariableManager get_vars()")

        all_vars = dict()
        magic_variables = self._get_magic_variables(
            play=play,
            host=host,
            task=task,
            include_hostvars=include_hostvars,
            include_delegate_to=include_delegate_to,
        )

        if play:
            # first we compile any vars specified in defaults/main.yml
            # for all roles within the specified play
            for role in play.get_roles():
                all_vars = combine_vars(all_vars, role.get_default_vars())

        # if we have a task in this context, and that task has a role, make
        # sure it sees its defaults above any other roles, as we previously
        # (v1) made sure each task had a copy of its roles default vars
        if task and task._role is not None and (play or task.action == 'include_role'):
            all_vars = combine_vars(all_vars, task._role.get_default_vars(dep_chain=task.get_dep_chain()))

        if host:
            ### INIT WORK (use unsafe as we are going to copy/merge vars, no need to x2 copy)
            # basedir, THE 'all' group and the rest of groups for a host, used below
            basedir = self._loader.get_basedir()
            all_group = self._inventory.groups.get('all')
            host_groups = sort_groups([g for g in host.get_groups() if g.name not in ['all']])

            ### internal functions that actually do the work ###
            def _plugins_inventory(entities):
                ''' merges all entities by inventory source '''
                data = {}
                for inventory_dir in self._inventory._sources:
                    if ',' in inventory_dir:  # skip host lists
                        continue
                    elif not os.path.isdir(inventory_dir):  # always pass 'inventory directory'
                        inventory_dir = os.path.dirname(inventory_dir)

                    for plugin in vars_loader.all():
                        data = combine_vars(data, plugin.get_vars(self._loader, inventory_dir, entities))

                return data

            def _plugins_play(entities):
                ''' merges all entities adjacent to play '''
                data = {}
                for plugin in vars_loader.all():
                    data = combine_vars(data, plugin.get_vars(self._loader, basedir, entities))
                return data

            ### configurable functions that are sortable via config ###
            def all_inventory():
                return all_group.get_vars()

            def all_plugins_inventory():
                return _plugins_inventory([all_group])

            def all_plugins_play():
                return _plugins_play([all_group])

            def groups_inventory():
                ''' gets group vars from inventory '''
                return get_group_vars(host_groups)

            def groups_plugins_inventory():
                ''' gets plugin sources from inventory for groups '''
                return _plugins_inventory(host_groups)

            def groups_plugins_play():
                ''' gets plugin sources from play for groups '''
                return _plugins_play(host_groups)

            def plugins_by_groups():
                '''
                merges all plugin sources by group,
                This should be used instead, NOT in combination with the other groups_plugins* functions
                '''
                data = {}
                for group in host_groups:
                    data[group] = combine_vars(data[group], _plugins_inventory(group))
                    data[group] = combine_vars(data[group], _plugins_play(group))
                return data

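            # Editorial note (not part of the original commit): the entries in
            # C.VARIABLE_PRECEDENCE are expected to name the nested functions
            # defined above (all_inventory, all_plugins_inventory, all_plugins_play,
            # groups_inventory, groups_plugins_inventory, groups_plugins_play,
            # plugins_by_groups), which is what makes the group/host var merge
            # order configurable; the exact config option feeding this list lives
            # in ansible.constants and is not shown here.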
            # Merge as per precedence config
            for entry in C.VARIABLE_PRECEDENCE:
                # only allow calling the functions we want exposed
                if entry.startswith('_') or '.' in entry:
                    continue
                display.debug('Calling %s to load vars for %s' % (entry, host.name))
                all_vars = combine_vars(all_vars, locals()[entry]())

            # host vars, from inventory, inventory adjacent and play adjacent via plugins
            all_vars = combine_vars(all_vars, host.get_vars())
            all_vars = combine_vars(all_vars, _plugins_inventory([host]))
            all_vars = combine_vars(all_vars, _plugins_play([host]))

            # finally, the facts cache for this host, if it exists
            try:
                host_facts = wrap_var(self._fact_cache.get(host.name, dict()))
                if not C.NAMESPACE_FACTS:
                    # allow facts to pollute the main namespace
                    all_vars = combine_vars(all_vars, host_facts)
                # always return namespaced facts
                all_vars = combine_vars(all_vars, {'ansible_facts': host_facts})
            except KeyError:
                pass

        if play:
            all_vars = combine_vars(all_vars, play.get_vars())

            for vars_file_item in play.get_vars_files():
                # create a set of temporary vars here, which incorporate the extra
                # and magic vars so we can properly template the vars_files entries
                temp_vars = combine_vars(all_vars, self._extra_vars)
                temp_vars = combine_vars(temp_vars, magic_variables)
                templar = Templar(loader=self._loader, variables=temp_vars)

                # we assume each item in the list is itself a list, as we
                # support "conditional includes" for vars_files, which mimics
                # the with_first_found mechanism.
                vars_file_list = vars_file_item
                if not isinstance(vars_file_list, list):
                    vars_file_list = [vars_file_list]

                # now we iterate through the (potential) files, and break out
                # as soon as we read one from the list. If none are found, we
                # raise an error, which is silently ignored at this point.
                try:
                    for vars_file in vars_file_list:
                        vars_file = templar.template(vars_file)
                        try:
                            data = preprocess_vars(self._loader.load_from_file(vars_file, unsafe=True))
                            if data is not None:
                                for item in data:
                                    all_vars = combine_vars(all_vars, item)
                            break
                        except AnsibleFileNotFound:
                            # we continue on loader failures
                            continue
                        except AnsibleParserError:
                            raise
                    else:
                        # if include_delegate_to is set to False, we ignore the missing
                        # vars file here because we're working on a delegated host
                        if include_delegate_to:
                            raise AnsibleFileNotFound("vars file %s was not found" % vars_file_item)
                except (UndefinedError, AnsibleUndefinedVariable):
                    if host is not None and self._fact_cache.get(host.name, dict()).get('module_setup') and task is not None:
                        raise AnsibleUndefinedVariable("an undefined variable was found when attempting to template the vars_files item '%s'" % vars_file_item,
                                                       obj=vars_file_item)
                    else:
                        # we do not have a full context here, and the missing variable could be
                        # because of that, so just show a warning and continue
                        display.vvv("skipping vars_file '%s' due to an undefined variable" % vars_file_item)
                        continue

            # By default, we now merge in all vars from all roles in the play,
            # unless the user has disabled this via a config option
            if not C.DEFAULT_PRIVATE_ROLE_VARS:
                for role in play.get_roles():
                    all_vars = combine_vars(all_vars, role.get_vars(include_params=False))

        # next, we merge in the vars from the role, which will specifically
        # follow the role dependency chain, and then we merge in the tasks
        # vars (which will look at parent blocks/task includes)
        if task:
            if task._role:
                all_vars = combine_vars(all_vars, task._role.get_vars(task.get_dep_chain(), include_params=False))
            all_vars = combine_vars(all_vars, task.get_vars())

        # next, we merge in the vars cache (include vars) and nonpersistent
        # facts cache (set_fact/register), in that order
        if host:
            all_vars = combine_vars(all_vars, self._vars_cache.get(host.get_name(), dict()))
            all_vars = combine_vars(all_vars, self._nonpersistent_fact_cache.get(host.name, dict()))

        # next, we merge in role params and task include params
        if task:
            if task._role:
                all_vars = combine_vars(all_vars, task._role.get_role_params(task.get_dep_chain()))

            # special case for include tasks, where the include params
            # may be specified in the vars field for the task, which should
            # have higher precedence than the vars/np facts above
            all_vars = combine_vars(all_vars, task.get_include_params())

        # extra vars
        all_vars = combine_vars(all_vars, self._extra_vars)

        # magic variables
        all_vars = combine_vars(all_vars, magic_variables)

        # special case for the 'environment' magic variable, as someone
        # may have set it as a variable and we don't want to stomp on it
        if task:
            all_vars['environment'] = task.environment

        # if we have a task and we're delegating to another host, figure out the
        # variables for that host now so we don't have to rely on hostvars later
        if task and task.delegate_to is not None and include_delegate_to:
            all_vars['ansible_delegated_vars'] = self._get_delegated_vars(play, task, all_vars)

        # 'vars' magic var
        if task or play:
            # has to be a copy, otherwise recursive ref
            all_vars['vars'] = all_vars.copy()

        display.debug("done with get_vars()")
        return all_vars

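    # Editorial note (not part of the original commit): a rough summary of the
    # merge steps in get_vars() above, lowest to highest precedence, as the code
    # is written:
    #
    #   role defaults -> inventory/group/host vars -> facts -> play vars and
    #   vars_files -> role/task vars -> include_vars (_vars_cache) ->
    #   set_fact/register (_nonpersistent_fact_cache) -> role/include params ->
    #   extra vars -> magic variables
    #
    # so, for example, a value produced by set_fact overrides one loaded via
    # include_vars, and --extra-vars overrides both.
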
    def _get_magic_variables(self, play, host, task, include_hostvars, include_delegate_to):
        '''
        Returns a dictionary of so-called "magic" variables in Ansible,
        which are special variables we set internally for use.
        '''

        variables = {}
        variables['playbook_dir'] = os.path.abspath(self._loader.get_basedir())
        variables['ansible_playbook_python'] = sys.executable

        if host:
            # host already provides some magic vars via host.get_vars()
            if self._inventory:
                variables['groups'] = self._inventory.get_groups_dict()
        if play:
            variables['role_names'] = [r._role_name for r in play.roles]

        if task:
            if task._role:
                variables['role_name'] = task._role.get_name()
                variables['role_path'] = task._role._role_path
                variables['role_uuid'] = text_type(task._role._uuid)

        if self._inventory is not None:
            if play:
                templar = Templar(loader=self._loader)
                if templar.is_template(play.hosts):
                    pattern = 'all'
                else:
                    pattern = play.hosts or 'all'
                # add the list of hosts in the play, as adjusted for limit/filters
                variables['ansible_play_hosts_all'] = [x.name for x in self._inventory.get_hosts(pattern=pattern, ignore_restrictions=True)]
                variables['ansible_play_hosts'] = [x for x in variables['ansible_play_hosts_all'] if x not in play._removed_hosts]
                variables['ansible_play_batch'] = [x.name for x in self._inventory.get_hosts() if x.name not in play._removed_hosts]

                # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_batch,
                # however this would take work in the templating engine, so for now we'll add both
                variables['play_hosts'] = variables['ansible_play_batch']

        # the 'omit' value allows params to be left out if the variable they are based on is undefined
        variables['omit'] = self._omit_token
        # Set options vars
        for option, option_value in iteritems(self._options_vars):
            variables[option] = option_value

        if self._hostvars is not None and include_hostvars:
            variables['hostvars'] = self._hostvars

        return variables

    def _get_delegated_vars(self, play, task, existing_variables):
        # we unfortunately need to template the delegate_to field here,
        # as we're fetching vars before post_validate has been called on
        # the task that has been passed in
        vars_copy = existing_variables.copy()
        templar = Templar(loader=self._loader, variables=vars_copy)

        items = []
        if task.loop is not None:
            if task.loop in lookup_loader:
                try:
                    loop_terms = listify_lookup_plugin_terms(terms=task.loop_args, templar=templar,
                                                             loader=self._loader, fail_on_undefined=True, convert_bare=False)
                    items = lookup_loader.get(task.loop, loader=self._loader, templar=templar).run(terms=loop_terms, variables=vars_copy)
                except AnsibleUndefinedVariable:
                    # This task will be skipped later due to this, so we just setup
                    # a dummy array for the later code so it doesn't fail
                    items = [None]
            else:
                raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % task.loop)
        else:
            items = [None]

        delegated_host_vars = dict()
        for item in items:
            # update the variables with the item value for templating, in case we need it
            if item is not None:
                vars_copy['item'] = item

            templar.set_available_variables(vars_copy)
            delegated_host_name = templar.template(task.delegate_to, fail_on_undefined=False)
            if delegated_host_name is None:
                raise AnsibleError(message="Undefined delegate_to host for task:", obj=task._ds)
            if delegated_host_name in delegated_host_vars:
                # no need to repeat ourselves, as the delegate_to value
                # does not appear to be tied to the loop item variable
                continue

            # a dictionary of variables to use if we have to create a new host below
            # we set the default port based on the default transport here, to make sure
            # we use the proper default for windows
            new_port = C.DEFAULT_REMOTE_PORT
            if C.DEFAULT_TRANSPORT == 'winrm':
                new_port = 5986

            new_delegated_host_vars = dict(
                ansible_host=delegated_host_name,
                ansible_port=new_port,
                ansible_user=C.DEFAULT_REMOTE_USER,
                ansible_connection=C.DEFAULT_TRANSPORT,
            )

            # now try to find the delegated-to host in inventory, or failing that,
            # create a new host on the fly so we can fetch variables for it
            delegated_host = None
            if self._inventory is not None:
                delegated_host = self._inventory.get_host(delegated_host_name)
                # try looking it up based on the address field, and finally
                # fall back to creating a host on the fly to use for the var lookup
                if delegated_host is None:
                    if delegated_host_name in C.LOCALHOST:
                        delegated_host = self._inventory.localhost
                    else:
                        for h in self._inventory.get_hosts(ignore_limits=True, ignore_restrictions=True):
                            # check if the address matches, or if both the delegated_to host
                            # and the current host are in the list of localhost aliases
                            if h.address == delegated_host_name:
                                delegated_host = h
                                break
                        else:
                            delegated_host = Host(name=delegated_host_name)
                            delegated_host.vars = combine_vars(delegated_host.vars, new_delegated_host_vars)
            else:
                delegated_host = Host(name=delegated_host_name)
                delegated_host.vars = combine_vars(delegated_host.vars, new_delegated_host_vars)

            # now we go fetch the vars for the delegated-to host and save them in our
            # master dictionary of variables to be used later in the TaskExecutor/PlayContext
            delegated_host_vars[delegated_host_name] = self.get_vars(
                play=play,
                host=delegated_host,
                task=task,
                include_delegate_to=False,
                include_hostvars=False,
            )
        return delegated_host_vars

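    # Editorial note (not part of the original commit): _get_delegated_vars()
    # covers the case where delegate_to is templated per loop item, e.g. in a
    # play (hypothetical example):
    #
    #   - command: /usr/bin/do_something
    #     delegate_to: "{{ item }}"
    #     with_items: "{{ groups['proxies'] }}"
    #
    # each distinct delegated host name gets its own get_vars() result, keyed
    # by name in the returned dictionary.
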
    def clear_facts(self, hostname):
        '''
        Clears the facts for a host
        '''
        if hostname in self._fact_cache:
            del self._fact_cache[hostname]

    def set_host_facts(self, host, facts):
        '''
        Sets or updates the given facts for a host in the fact cache.
        '''

        assert isinstance(facts, dict)

        if host.name not in self._fact_cache:
            self._fact_cache[host.name] = facts
        else:
            try:
                self._fact_cache.update(host.name, facts)
            except KeyError:
                self._fact_cache[host.name] = facts

    def set_nonpersistent_facts(self, host, facts):
        '''
        Sets or updates the given facts for a host in the fact cache.
        '''

        assert isinstance(facts, dict)

        if host.name not in self._nonpersistent_fact_cache:
            self._nonpersistent_fact_cache[host.name] = facts
        else:
            try:
                self._nonpersistent_fact_cache[host.name].update(facts)
            except KeyError:
                self._nonpersistent_fact_cache[host.name] = facts

    def set_host_variable(self, host, varname, value):
        '''
        Sets a value in the vars_cache for a host.
        '''
        host_name = host.get_name()
        if host_name not in self._vars_cache:
            self._vars_cache[host_name] = dict()
        if varname in self._vars_cache[host_name] and isinstance(self._vars_cache[host_name][varname], MutableMapping) and isinstance(value, MutableMapping):
            self._vars_cache[host_name] = combine_vars(self._vars_cache[host_name], {varname: value})
        else:
            self._vars_cache[host_name][varname] = value
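
# Editorial note (not part of the original commit): a minimal usage sketch,
# assuming the loader/inventory wiring introduced elsewhere in this change
# (module paths and class names outside this file are assumptions):
#
#   from ansible.parsing.dataloader import DataLoader
#   from ansible.inventory.manager import InventoryManager
#
#   loader = DataLoader()
#   inventory = InventoryManager(loader=loader, sources=['/etc/ansible/hosts'])
#   variable_manager = VariableManager(loader=loader, inventory=inventory)
#   for host in inventory.get_hosts():
#       host_vars = variable_manager.get_vars(host=host)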