@@ -31,7 +31,6 @@ DOCUMENTATION = '''
 
 from ansible import constants as C
 from ansible.errors import AnsibleError, AnsibleAssertionError, AnsibleParserError
-from ansible.executor.play_iterator import IteratingStates
 from ansible.module_utils.common.text.converters import to_text
 from ansible.playbook.handler import Handler
 from ansible.playbook.included_file import IncludedFile
@@ -46,12 +45,6 @@ display = Display()
 
 class StrategyModule(StrategyBase):
 
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-
-        # used for the lockstep to indicate to run handlers
-        self._in_handlers = False
-
     def _get_next_task_lockstep(self, hosts, iterator):
         '''
         Returns a list of (host, task) tuples, where the task may
@@ -73,52 +66,35 @@ class StrategyModule(StrategyBase):
         if not state_task_per_host:
             return [(h, None) for h in hosts]
 
-        if self._in_handlers and not any(filter(
-                lambda rs: rs == IteratingStates.HANDLERS,
-                (s.run_state for s, dummy in state_task_per_host.values()))
-        ):
-            self._in_handlers = False
-
-        if self._in_handlers:
-            lowest_cur_handler = min(
-                s.cur_handlers_task for s, t in state_task_per_host.values()
-                if s.run_state == IteratingStates.HANDLERS
-            )
-        else:
-            task_uuids = [t._uuid for s, t in state_task_per_host.values()]
-            _loop_cnt = 0
-            while _loop_cnt <= 1:
-                try:
-                    cur_task = iterator.all_tasks[iterator.cur_task]
-                except IndexError:
-                    # pick up any tasks left after clear_host_errors
-                    iterator.cur_task = 0
-                    _loop_cnt += 1
-                else:
-                    iterator.cur_task += 1
-                    if cur_task._uuid in task_uuids:
-                        break
-            else:
-                # prevent infinite loop
-                raise AnsibleAssertionError(
-                    'BUG: There seems to be a mismatch between tasks in PlayIterator and HostStates.'
-                )
+        task_uuids = {t._uuid for s, t in state_task_per_host.values()}
+        _loop_cnt = 0
+        while _loop_cnt <= 1:
+            try:
+                cur_task = iterator.all_tasks[iterator.cur_task]
+            except IndexError:
+                # pick up any tasks left after clear_host_errors
+                iterator.cur_task = 0
+                _loop_cnt += 1
+            else:
+                iterator.cur_task += 1
+                if cur_task._uuid in task_uuids:
+                    break
+        else:
+            # prevent infinite loop
+            raise AnsibleAssertionError(
+                'BUG: There seems to be a mismatch between tasks in PlayIterator and HostStates.'
+            )
 
         host_tasks = []
         for host, (state, task) in state_task_per_host.items():
-            if ((self._in_handlers and lowest_cur_handler == state.cur_handlers_task) or
-                    (not self._in_handlers and cur_task._uuid == task._uuid)):
+            if cur_task._uuid == task._uuid:
                 iterator.set_state_for_host(host.name, state)
                 host_tasks.append((host, task))
             else:
                 host_tasks.append((host, noop_task))
 
-        # once hosts synchronize on 'flush_handlers' lockstep enters
-        # '_in_handlers' phase where handlers are run instead of tasks
-        # until at least one host is in IteratingStates.HANDLERS
-        if (not self._in_handlers and cur_task.action in C._ACTION_META and
-                cur_task.args.get('_raw_params') == 'flush_handlers'):
-            self._in_handlers = True
+        if cur_task.action in C._ACTION_META and cur_task.args.get('_raw_params') == 'flush_handlers':
+            iterator.all_tasks[iterator.cur_task:iterator.cur_task] = [h for b in iterator._play.handlers for h in b.block]
 
         return host_tasks
 
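
For readers following the new lockstep, the sketch below restates the cursor scan added above in isolation: a single index walks the flat task list until it reaches a task that at least one host reports as its next task, wrapping around at most once before treating the situation as a bug. In the real code the same pass also handles the 'flush_handlers' meta task, splicing the play's handler tasks in right behind the cursor. The names SimpleTask, pick_current_task and next_uuids are illustrative only and are not part of the Ansible code.

# Illustrative sketch only; mirrors the while/else scan in the new
# _get_next_task_lockstep. SimpleTask and pick_current_task are made-up names.
class SimpleTask:
    def __init__(self, uuid):
        self.uuid = uuid


def pick_current_task(all_tasks, next_uuids):
    """Advance a shared cursor until it points at a task some host wants."""
    cur = 0
    wraps = 0
    while wraps <= 1:
        if cur >= len(all_tasks):
            cur = 0            # wrap around once, like the IndexError branch
            wraps += 1
            continue
        task = all_tasks[cur]
        cur += 1
        if task.uuid in next_uuids:
            return task, cur   # hosts whose next task differs get a noop
    # reaching here mirrors the AnsibleAssertionError: states and list disagree
    raise AssertionError('no host state matches any task in the list')


tasks = [SimpleTask('a'), SimpleTask('b'), SimpleTask('c')]
task, cur = pick_current_task(tasks, next_uuids={'b', 'c'})
assert task.uuid == 'b' and cur == 2
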
@@ -310,7 +286,7 @@ class StrategyModule(StrategyBase):
                                     final_block = new_block.filter_tagged_tasks(task_vars)
                                     display.debug("done filtering new block on tags")
 
-                                    included_tasks.extend(final_block.get_tasks())
+                                included_tasks.extend(final_block.get_tasks())
 
                                 for host in hosts_left:
                                     if host in included_file._hosts:
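
A small toy of the slice assignment that the new flush_handlers branch relies on may help: all_tasks[cur:cur] = items inserts items at the cursor without disturbing anything already consumed to its left. The hunk above touches the include-file path for presumably the same reason, so that tasks produced by includes end up in included_tasks and can later be spliced into iterator.all_tasks the same way (that splice is not part of this diff). The task and handler names below are invented and no Ansible objects are used.

# Toy demonstration of list splicing via slice assignment, as used above to
# insert the play's handler tasks right behind the 'flush_handlers' meta task.
all_tasks = ['setup', 'flush_handlers', 'deploy']
cur = 2                         # cursor already advanced past flush_handlers
handlers = ['restart nginx', 'reload systemd']   # invented handler names

all_tasks[cur:cur] = handlers   # insert at the cursor, shifting 'deploy' right
assert all_tasks == ['setup', 'flush_handlers', 'restart nginx', 'reload systemd', 'deploy']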