Make sure all plugin loaders are loaded from roles and shared correctly (v2)

pull/10870/merge
James Cammarata 10 years ago
parent 0b836262f0
commit f310d13280

@ -248,12 +248,11 @@ class ConnectionInformation:
def _get_fields(self):
return [i for i in self.__dict__.keys() if i[:1] != '_']
- def post_validate(self, variables, loader):
+ def post_validate(self, templar):
'''
Finalizes templated values which may be set on this objects fields.
'''
- templar = Templar(loader=loader, variables=variables)
for field in self._get_fields():
value = templar.template(getattr(self, field))
setattr(self, field, value)
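The hunk above inverts the old contract: post_validate() no longer builds its own Templar from (variables, loader); instead the caller constructs one Templar and passes it in. A minimal sketch of the new calling convention, assuming a loader and an all_vars dict are already in hand (conn_info here is illustrative, not a name from the diff):

    from ansible.template import Templar

    templar = Templar(loader=loader, variables=all_vars)
    conn_info.post_validate(templar=templar)  # previously: conn_info.post_validate(variables=all_vars, loader=loader)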

@ -25,6 +25,7 @@ from ansible import constants as C
from ansible.errors import *
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.playbook import Playbook
+ from ansible.template import Templar
from ansible.utils.color import colorize, hostcolor
from ansible.utils.debug import debug
@ -80,8 +81,9 @@ class PlaybookExecutor:
# Create a temporary copy of the play here, so we can run post_validate
# on it without the templating changes affecting the original object.
all_vars = self._variable_manager.get_vars(loader=self._loader, play=play)
+ templar = Templar(loader=self._loader, variables=all_vars, fail_on_undefined=False)
new_play = play.copy()
- new_play.post_validate(all_vars, fail_on_undefined=False)
+ new_play.post_validate(templar)
if self._tqm is None:
# we are just doing a listing

@ -94,7 +94,7 @@ class WorkerProcess(multiprocessing.Process):
try:
if not self._main_q.empty():
debug("there's work to be done!")
- (host, task, basedir, job_vars, connection_info, module_loader) = self._main_q.get(block=False)
+ (host, task, basedir, job_vars, connection_info, shared_loader_obj) = self._main_q.get(block=False)
debug("got a task/handler to work on: %s" % task)
# because the task queue manager starts workers (forks) before the
@ -115,7 +115,7 @@ class WorkerProcess(multiprocessing.Process):
# execute the task and build a TaskResult from the result
debug("running TaskExecutor() for %s/%s" % (host, task))
- executor_result = TaskExecutor(host, task, job_vars, new_connection_info, self._new_stdin, self._loader, module_loader).run()
+ executor_result = TaskExecutor(host, task, job_vars, new_connection_info, self._new_stdin, self._loader, shared_loader_obj).run()
debug("done running TaskExecutor() for %s/%s" % (host, task))
task_result = TaskResult(host, task, executor_result)

@ -31,6 +31,7 @@ from ansible.executor.connection_info import ConnectionInformation
from ansible.playbook.conditional import Conditional
from ansible.playbook.task import Task
from ansible.plugins import lookup_loader, connection_loader, action_loader
+ from ansible.template import Templar
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.unicode import to_unicode
@ -47,14 +48,14 @@ class TaskExecutor:
class.
'''
- def __init__(self, host, task, job_vars, connection_info, new_stdin, loader, module_loader):
+ def __init__(self, host, task, job_vars, connection_info, new_stdin, loader, shared_loader_obj):
self._host = host
self._task = task
self._job_vars = job_vars
self._connection_info = connection_info
self._new_stdin = new_stdin
self._loader = loader
- self._module_loader = module_loader
+ self._shared_loader_obj = shared_loader_obj
def run(self):
'''
@ -195,9 +196,11 @@ class TaskExecutor:
if variables is None:
variables = self._job_vars
+ templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)
# fields set from the play/task may be based on variables, so we have to
# do the same kind of post validation step on it here before we use it.
- self._connection_info.post_validate(variables=variables, loader=self._loader)
+ self._connection_info.post_validate(templar=templar)
# now that the connection information is finalized, we can add 'magic'
# variables to the variable dictionary
@ -216,7 +219,7 @@ class TaskExecutor:
return dict(changed=False, skipped=True, skip_reason='Conditional check failed')
# Now we do final validation on the task, which sets all fields to their final values
- self._task.post_validate(variables)
+ self._task.post_validate(templar=templar)
# if this task is a TaskInclude, we just return now with a success code so the
# main thread can expand the task list for the given host
@ -336,7 +339,7 @@ class TaskExecutor:
connection=self._connection,
connection_info=self._connection_info,
loader=self._loader,
- module_loader=self._module_loader,
+ shared_loader_obj=self._shared_loader_obj,
)
time_left = self._task.async
@ -408,7 +411,7 @@ class TaskExecutor:
connection=connection,
connection_info=self._connection_info,
loader=self._loader,
- module_loader=self._module_loader,
+ shared_loader_obj=self._shared_loader_obj,
)
if not handler:
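Read together, the TaskExecutor hunks mean a single Templar carrying the shared loader object is built once per task run and reused for both the connection settings and the task itself, and the same _shared_loader_obj is later handed to the action plugin. A stripped-down sketch of that flow (error handling and the rest of the method omitted):

    # sketch of the new _execute() flow, not the full method
    templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)

    # finalize play/task-derived fields with the same templar
    self._connection_info.post_validate(templar=templar)
    self._task.post_validate(templar=templar)

    # later, the action handler is constructed with the shared loaders, roughly:
    #   action_loader.get(..., loader=self._loader, shared_loader_obj=self._shared_loader_obj)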

@ -32,6 +32,7 @@ from ansible.executor.process.worker import WorkerProcess
from ansible.executor.process.result import ResultProcess
from ansible.executor.stats import AggregateStats
from ansible.plugins import callback_loader, strategy_loader
+ from ansible.template import Templar
from ansible.utils.debug import debug
@ -159,9 +160,10 @@ class TaskQueueManager:
'''
all_vars = self._variable_manager.get_vars(loader=self._loader, play=play)
+ templar = Templar(loader=self._loader, variables=all_vars, fail_on_undefined=False)
new_play = play.copy()
- new_play.post_validate(all_vars, fail_on_undefined=False)
+ new_play.post_validate(templar)
connection_info = ConnectionInformation(new_play, self._options, self.passwords)
for callback_plugin in self._callback_plugins:

@ -234,7 +234,7 @@ class Base:
return new_me
- def post_validate(self, all_vars=dict(), fail_on_undefined=True):
+ def post_validate(self, templar):
'''
we can't tell that everything is of the right type until we have
all the variables. Run basic types (from isa) as well as
@ -245,8 +245,6 @@ class Base:
if self._loader is not None:
basedir = self._loader.get_basedir()
- templar = Templar(loader=self._loader, variables=all_vars, fail_on_undefined=fail_on_undefined)
for (name, attribute) in iteritems(self._get_base_attributes()):
if getattr(self, name) is None:
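Since Base.post_validate() now receives the templar rather than building one, its body reduces to walking the object's attributes with that templar. A stripped-down sketch of the shape (the real method also enforces required/isa checks and per-attribute overrides, omitted here):

    def post_validate(self, templar):
        # template each attribute value with the caller-supplied templar
        for (name, attribute) in iteritems(self._get_base_attributes()):
            if getattr(self, name) is None:
                continue  # the real code also honours attribute.required here
            value = templar.template(getattr(self, name))
            setattr(self, name, value)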

@ -21,6 +21,7 @@ __metaclass__ = type
from six import iteritems, string_types
+ import inspect
import os
from hashlib import sha1
@ -36,9 +37,11 @@ from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.role.include import RoleInclude
from ansible.playbook.role.metadata import RoleMetadata
from ansible.playbook.taggable import Taggable
- from ansible.plugins import module_loader
+ from ansible.plugins import PluginLoader
from ansible.utils.vars import combine_vars
+ from ansible import plugins as ansible_plugins
__all__ = ['Role', 'ROLE_CACHE', 'hash_params']
@ -152,11 +155,15 @@ class Role(Base, Become, Conditional, Taggable):
current_tags.extend(role_include.tags)
setattr(self, 'tags', current_tags)
- # load the role's files, if they exist
- library = os.path.join(self._role_path, 'library')
- if os.path.isdir(library):
-     module_loader.add_directory(library)
+ # dynamically load any plugins from the role directory
+ for name, obj in inspect.getmembers(ansible_plugins):
+     if isinstance(obj, PluginLoader):
+         if obj.subdir:
+             plugin_path = os.path.join(self._role_path, obj.subdir)
+             if os.path.isdir(plugin_path):
+                 obj.add_directory(plugin_path)
+ # load the role's other files, if they exist
metadata = self._load_role_yaml('meta')
if metadata:
self._metadata = RoleMetadata.load(metadata, owner=self, loader=self._loader)
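The practical effect of the new loop: every plugin-style subdirectory inside a role is registered with its matching loader, where the old code only handled library/ for modules. A hedged illustration of what one iteration does for a role on disk (the role path is made up; each loader's subdir attribute names a directory such as library, filter_plugins, or lookup_plugins):

    role_path = '/etc/ansible/roles/myrole'                      # hypothetical role
    plugin_path = os.path.join(role_path, filter_loader.subdir)  # e.g. 'filter_plugins'
    if os.path.isdir(plugin_path):
        filter_loader.add_directory(plugin_path)                 # role-local filters become loadable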

@ -177,18 +177,18 @@ class Task(Base, Conditional, Taggable, Become):
return super(Task, self).preprocess_data(new_ds)
- def post_validate(self, all_vars=dict(), fail_on_undefined=True):
+ def post_validate(self, templar):
'''
Override of base class post_validate, to also do final validation on
the block and task include (if any) to which this task belongs.
'''
if self._block:
- self._block.post_validate(all_vars=all_vars, fail_on_undefined=fail_on_undefined)
+ self._block.post_validate(templar)
if self._task_include:
- self._task_include.post_validate(all_vars=all_vars, fail_on_undefined=fail_on_undefined)
+ self._task_include.post_validate(templar)
- super(Task, self).post_validate(all_vars=all_vars, fail_on_undefined=fail_on_undefined)
+ super(Task, self).post_validate(templar)
def get_vars(self):
all_vars = self.vars.copy()

@ -44,12 +44,12 @@ class ActionBase:
action in use.
'''
- def __init__(self, task, connection, connection_info, loader, module_loader):
+ def __init__(self, task, connection, connection_info, loader, shared_loader_obj):
self._task = task
self._connection = connection
self._connection_info = connection_info
self._loader = loader
- self._module_loader = module_loader
+ self._shared_loader_obj = shared_loader_obj
self._shell = self.get_shell()
self._supports_check_mode = True
@ -73,9 +73,9 @@ class ActionBase:
# Search module path(s) for named module.
module_suffixes = getattr(self._connection, 'default_suffixes', None)
- module_path = self._module_loader.find_plugin(module_name, module_suffixes)
+ module_path = self._shared_loader_obj.module_loader.find_plugin(module_name, module_suffixes)
if module_path is None:
- module_path2 = self._module_loader.find_plugin('ping', module_suffixes)
+ module_path2 = self._shared_loader_obj.module_loader.find_plugin('ping', module_suffixes)
if module_path2 is not None:
raise AnsibleError("The module %s was not found in configured module paths" % (module_name))
else:

@ -35,7 +35,7 @@ class ActionModule(ActionBase):
result = dict(msg=self._task.args['msg'])
# FIXME: move the LOOKUP_REGEX somewhere else
elif 'var' in self._task.args: # and not utils.LOOKUP_REGEX.search(self._task.args['var']):
- templar = Templar(loader=self._loader, variables=task_vars)
+ templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=task_vars)
results = templar.template(self._task.args['var'], convert_bare=True)
result = dict()
result[self._task.args['var']] = results

@ -30,12 +30,24 @@ from ansible.inventory.group import Group
from ansible.playbook.handler import Handler
from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.role import ROLE_CACHE, hash_params
- from ansible.plugins import module_loader
+ from ansible.plugins import module_loader, filter_loader, lookup_loader
from ansible.utils.debug import debug
__all__ = ['StrategyBase']
+ # FIXME: this should probably be in the plugins/__init__.py, with
+ # a smarter mechanism to set all of the attributes based on
+ # the loaders created there
+ class SharedPluginLoaderObj:
+     '''
+     A simple object to make pass the various plugin loaders to
+     the forked processes over the queue easier
+     '''
+     def __init__(self):
+         self.module_loader = module_loader
+         self.filter_loader = filter_loader
+         self.lookup_loader = lookup_loader
class StrategyBase:
@ -108,7 +120,12 @@ class StrategyBase:
self._cur_worker = 0
self._pending_results += 1
- main_q.put((host, task, self._loader.get_basedir(), task_vars, connection_info, module_loader), block=False)
+ # create a dummy object with plugin loaders set as an easier
+ # way to share them with the forked processes
+ shared_loader_obj = SharedPluginLoaderObj()
+ main_q.put((host, task, self._loader.get_basedir(), task_vars, connection_info, shared_loader_obj), block=False)
except (EOFError, IOError, AssertionError) as e:
# most likely an abort
debug("got an error while queuing: %s" % e)

@ -53,12 +53,19 @@ class Templar:
The main class for templating, with the main entry-point of template().
'''
- def __init__(self, loader, variables=dict(), fail_on_undefined=C.DEFAULT_UNDEFINED_VAR_BEHAVIOR):
+ def __init__(self, loader, shared_loader_obj=None, variables=dict(), fail_on_undefined=C.DEFAULT_UNDEFINED_VAR_BEHAVIOR):
self._loader = loader
self._basedir = loader.get_basedir()
self._filters = None
self._available_variables = variables
+ if shared_loader_obj:
+     self._filter_loader = getattr(shared_loader_obj, 'filter_loader')
+     self._lookup_loader = getattr(shared_loader_obj, 'lookup_loader')
+ else:
+     self._filter_loader = filter_loader
+     self._lookup_loader = lookup_loader
# flags to determine whether certain failures during templating
# should result in fatal errors being raised
self._fail_on_lookup_errors = True
@ -88,7 +95,7 @@ class Templar:
if self._filters is not None:
return self._filters.copy()
- plugins = [x for x in filter_loader.all()]
+ plugins = [x for x in self._filter_loader.all()]
self._filters = dict()
for fp in plugins:
@ -205,7 +212,7 @@ class Templar:
return thing if thing is not None else ''
def _lookup(self, name, *args, **kwargs):
- instance = lookup_loader.get(name.lower(), loader=self._loader)
+ instance = self._lookup_loader.get(name.lower(), loader=self._loader)
if instance is not None:
# safely catch run failures per #5059
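Usage-wise, the constructor change means a Templar created inside a forked worker resolves filters and lookups through the loaders it was handed, while callers with no shared object (PlaybookExecutor and TaskQueueManager above) fall back to the module-level filter_loader and lookup_loader. A small sketch of both call styles (the surrounding variables are assumed to exist):

    # inside a worker/executor, where shared_loader_obj came across the queue
    templar = Templar(loader=loader, shared_loader_obj=shared_loader_obj, variables=task_vars)

    # in the main process, before any shared object exists
    templar = Templar(loader=loader, variables=all_vars, fail_on_undefined=False)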
