AnsiballZ refactoring and remote debug support (#85289)

pull/75215/head
Matt Clay 6 months ago committed by GitHub
parent aa8d58a174
commit 45dd2c0647
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

@ -0,0 +1,5 @@
minor_changes:
- ansiballz - Refactored AnsiballZ and module respawn.
- ansiballz - Added support for AnsiballZ extensions.
- ansiballz - Moved AnsiballZ code coverage support into an extension.
- ansiballz - Added an experimental AnsiballZ extension for remote debugging.

@ -0,0 +1,101 @@
from __future__ import annotations
import dataclasses
import json
import typing as t
from ansible.module_utils._internal._ansiballz import _extensions
from ansible.module_utils._internal._ansiballz._extensions import _pydevd, _coverage
from ansible.constants import config
_T = t.TypeVar('_T')
class ExtensionManager:
    """Manages the set of AnsiballZ extensions enabled for a module invocation."""

    def __init__(
        self,
        debugger: _pydevd.Options | None = None,
        coverage: _coverage.Options | None = None,
    ) -> None:
        self._debugger = debugger
        self._coverage = coverage

        # Extension names double as both the payload config key and the submodule name under `_extensions`.
        enabled: list[str] = []

        if debugger:
            enabled.append('_pydevd')

        if coverage:
            enabled.append('_coverage')

        self._extension_names = tuple(enabled)
        self._module_names = tuple(f'{_extensions.__name__}.{name}' for name in enabled)

        # Mapping of controller source paths to payload-relative paths; populated externally during payload assembly.
        self.source_mapping: dict[str, str] = {}

    @property
    def debugger_enabled(self) -> bool:
        """Returns True if the debugger extension is enabled, otherwise False."""
        return self._debugger is not None

    @property
    def extension_names(self) -> tuple[str, ...]:
        """Names of extensions to include in the AnsiballZ payload."""
        return self._extension_names

    @property
    def module_names(self) -> tuple[str, ...]:
        """Python module names of extensions to include in the AnsiballZ payload."""
        return self._module_names

    def get_extensions(self) -> dict[str, dict[str, object]]:
        """Return the configured extensions and their options as plain dictionaries."""
        configured: dict[str, t.Any] = {}

        if self._debugger:
            # Inject the accumulated source mapping so the remote debugger can resolve controller paths.
            configured['_pydevd'] = dataclasses.replace(self._debugger, source_mapping=self._get_source_mapping())

        if self._coverage:
            configured['_coverage'] = self._coverage

        return {name: dataclasses.asdict(options) for name, options in configured.items()}

    def _get_source_mapping(self) -> dict[str, str]:
        """Get the source mapping, adjusting the source root as needed."""
        if not self._debugger.source_mapping:
            return self.source_mapping

        return {self._translate_path(local_path): payload_path for local_path, payload_path in self.source_mapping.items()}

    def _translate_path(self, path: str) -> str:
        """Translate a local path to a foreign path using the user-configured mapping."""
        # Keys are the foreign (IDE-side) prefixes, values are the local (CLI-side) prefixes.
        for foreign_prefix, local_prefix in self._debugger.source_mapping.items():
            if path.startswith(local_prefix):
                return foreign_prefix + path[len(local_prefix):]

        return path

    @classmethod
    def create(cls, task_vars: dict[str, object]) -> t.Self:
        """Create an instance using the provided task vars."""
        return cls(
            debugger=cls._get_options('_ANSIBALLZ_DEBUGGER_CONFIG', _pydevd.Options, task_vars),
            coverage=cls._get_options('_ANSIBALLZ_COVERAGE_CONFIG', _coverage.Options, task_vars),
        )

    @classmethod
    def _get_options(cls, name: str, config_type: type[_T], task_vars: dict[str, object]) -> _T | None:
        """Parse configuration for the named config option as the specified type, or None if not configured."""
        value = config.get_config_value(name, variables=task_vars)

        if value is None:
            return None

        # String values (e.g. from environment variables) arrive as JSON and must be decoded first.
        if isinstance(value, str):
            value = json.loads(value)

        return config_type(**value)

@ -37,14 +37,13 @@ _ANSIBALLZ_WRAPPER = True
def _ansiballz_main(
zipdata: str,
zip_data: str,
ansible_module: str,
module_fqn: str,
params: str,
profile: str,
date_time: datetime.datetime,
coverage_config: str | None,
coverage_output: str | None,
extensions: dict[str, dict[str, object]],
rlimit_nofile: int,
) -> None:
import os
@ -136,15 +135,14 @@ def _ansiballz_main(
# can monkeypatch the right basic
sys.path.insert(0, modlib_path)
from ansible.module_utils._internal._ansiballz import run_module
from ansible.module_utils._internal._ansiballz import _loader
run_module(
_loader.run_module(
json_params=json_params,
profile=profile,
module_fqn=module_fqn,
modlib_path=modlib_path,
coverage_config=coverage_config,
coverage_output=coverage_output,
extensions=extensions,
)
def debug(command: str, modlib_path: str, json_params: bytes) -> None:
@ -223,13 +221,14 @@ def _ansiballz_main(
with open(args_path, 'rb') as reader:
json_params = reader.read()
from ansible.module_utils._internal._ansiballz import run_module
from ansible.module_utils._internal._ansiballz import _loader
run_module(
_loader.run_module(
json_params=json_params,
profile=profile,
module_fqn=module_fqn,
modlib_path=modlib_path,
extensions=extensions,
)
else:
@ -246,13 +245,14 @@ def _ansiballz_main(
# store this in remote_tmpdir (use system tempdir instead)
# Only need to use [ansible_module]_payload_ in the temp_path until we move to zipimport
# (this helps ansible-test produce coverage stats)
temp_path = tempfile.mkdtemp(prefix='ansible_' + ansible_module + '_payload_')
# IMPORTANT: The real path must be used here to ensure a remote debugger such as PyCharm (using pydevd) can resolve paths correctly.
temp_path = os.path.realpath(tempfile.mkdtemp(prefix='ansible_' + ansible_module + '_payload_'))
try:
zipped_mod = os.path.join(temp_path, 'ansible_' + ansible_module + '_payload.zip')
with open(zipped_mod, 'wb') as modlib:
modlib.write(base64.b64decode(zipdata))
modlib.write(base64.b64decode(zip_data))
if len(sys.argv) == 2:
debug(sys.argv[1], zipped_mod, encoded_params)

@ -1,6 +1,26 @@
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
_ANSIBALLZ_COVERAGE_CONFIG:
name: Configure the AnsiballZ code coverage extension
description:
- Enables and configures the AnsiballZ code coverage extension.
- This is for internal use only.
env:
- {name: _ANSIBLE_ANSIBALLZ_COVERAGE_CONFIG}
vars:
- {name: _ansible_ansiballz_coverage_config}
version_added: '2.19'
_ANSIBALLZ_DEBUGGER_CONFIG:
name: Configure the AnsiballZ remote debugging extension
description:
- Enables and configures the AnsiballZ remote debugging extension.
- This is for internal use only.
env:
- {name: _ANSIBLE_ANSIBALLZ_DEBUGGER_CONFIG}
vars:
- {name: _ansible_ansiballz_debugger_config}
version_added: '2.19'
_ANSIBLE_CONNECTION_PATH:
env:
- name: _ANSIBLE_CONNECTION_PATH

@ -37,6 +37,8 @@ from ast import AST, Import, ImportFrom
from io import BytesIO
from ansible._internal import _locking
from ansible._internal._ansiballz import _builder
from ansible._internal import _ansiballz
from ansible._internal._datatag import _utils
from ansible.module_utils._internal import _dataclass_validation
from ansible.module_utils.common.yaml import yaml_load
@ -54,7 +56,8 @@ from ansible.plugins.loader import module_utils_loader
from ansible._internal._templating._engine import TemplateOptions, TemplateEngine
from ansible.template import Templar
from ansible.utils.collection_loader._collection_finder import _get_collection_metadata, _nested_dict_get
from ansible.module_utils._internal import _json, _ansiballz
from ansible.module_utils._internal import _json
from ansible.module_utils._internal._ansiballz import _loader
from ansible.module_utils import basic as _basic
if t.TYPE_CHECKING:
@ -117,7 +120,7 @@ def _strip_comments(source: str) -> str:
def _read_ansiballz_code() -> str:
code = (pathlib.Path(__file__).parent.parent / '_internal/_ansiballz.py').read_text()
code = (pathlib.Path(_ansiballz.__file__).parent / '_wrapper.py').read_text()
if not C.DEFAULT_KEEP_REMOTE_FILES:
# Keep comments when KEEP_REMOTE_FILES is set. That way users will see
@ -709,7 +712,14 @@ def _get_module_metadata(module: ast.Module) -> ModuleMetadata:
return metadata
def recursive_finder(name: str, module_fqn: str, module_data: str | bytes, zf: zipfile.ZipFile, date_time: datetime.datetime) -> ModuleMetadata:
def recursive_finder(
name: str,
module_fqn: str,
module_data: str | bytes,
zf: zipfile.ZipFile,
date_time: datetime.datetime,
extension_manager: _builder.ExtensionManager,
) -> ModuleMetadata:
"""
Using ModuleDepFinder, make sure we have all of the module_utils files that
the module and its module_utils files needs. (no longer actually recursive)
@ -755,12 +765,14 @@ def recursive_finder(name: str, module_fqn: str, module_data: str | bytes, zf: z
# include module_utils that are always required
modules_to_process.extend((
_ModuleUtilsProcessEntry.from_module(_ansiballz),
_ModuleUtilsProcessEntry.from_module(_loader),
_ModuleUtilsProcessEntry.from_module(_basic),
_ModuleUtilsProcessEntry.from_module_name(_json.get_module_serialization_profile_module_name(profile, True)),
_ModuleUtilsProcessEntry.from_module_name(_json.get_module_serialization_profile_module_name(profile, False)),
))
modules_to_process.extend(_ModuleUtilsProcessEntry.from_module_name(name) for name in extension_manager.module_names)
module_info: ModuleUtilLocatorBase
# we'll be adding new modules inline as we discover them, so just keep going til we've processed them all
@ -815,12 +827,13 @@ def recursive_finder(name: str, module_fqn: str, module_data: str | bytes, zf: z
modules_to_process.append(_ModuleUtilsProcessEntry(normalized_name, False, module_info.redirected, is_optional=entry.is_optional))
for py_module_name in py_module_cache:
py_module_file_name = py_module_cache[py_module_name][1]
source_code, py_module_file_name = py_module_cache[py_module_name]
zf.writestr(_make_zinfo(py_module_file_name, date_time, zf=zf), source_code)
if extension_manager.debugger_enabled and (origin := Origin.get_tag(source_code)) and origin.path:
extension_manager.source_mapping[origin.path] = py_module_file_name
zf.writestr(
_make_zinfo(py_module_file_name, date_time, zf=zf),
py_module_cache[py_module_name][0]
)
mu_file = to_text(py_module_file_name, errors='surrogate_or_strict')
display.vvvvv("Including module_utils file %s" % mu_file)
@ -879,17 +892,27 @@ def _get_ansible_module_fqn(module_path):
return remote_module_fqn
def _add_module_to_zip(zf: zipfile.ZipFile, date_time: datetime.datetime, remote_module_fqn: str, b_module_data: bytes) -> None:
def _add_module_to_zip(
zf: zipfile.ZipFile,
date_time: datetime.datetime,
remote_module_fqn: str,
b_module_data: bytes,
module_path: str,
extension_manager: _builder.ExtensionManager,
) -> None:
"""Add a module from ansible or from an ansible collection into the module zip"""
module_path_parts = remote_module_fqn.split('.')
# Write the module
module_path = '/'.join(module_path_parts) + '.py'
zip_module_path = '/'.join(module_path_parts) + '.py'
zf.writestr(
_make_zinfo(module_path, date_time, zf=zf),
_make_zinfo(zip_module_path, date_time, zf=zf),
b_module_data
)
if extension_manager.debugger_enabled:
extension_manager.source_mapping[module_path] = zip_module_path
existing_paths: frozenset[str]
# Write the __init__.py's necessary to get there
@ -932,6 +955,8 @@ class _CachedModule:
zip_data: bytes
metadata: ModuleMetadata
source_mapping: dict[str, str]
"""A mapping of controller absolute source locations to target relative source locations within the AnsiballZ payload."""
def dump(self, path: str) -> None:
temp_path = pathlib.Path(path + '-part')
@ -1029,6 +1054,7 @@ def _find_module_utils(
if module_substyle == 'python':
date_time = datetime.datetime.now(datetime.timezone.utc)
if date_time.year < 1980:
raise AnsibleError(f'Cannot create zipfile due to pre-1980 configured date: {date_time}')
@ -1038,19 +1064,19 @@ def _find_module_utils(
display.warning(u'Bad module compression string specified: %s. Using ZIP_STORED (no compression)' % module_compression)
compression_method = zipfile.ZIP_STORED
extension_manager = _builder.ExtensionManager.create(task_vars=task_vars)
extension_key = '~'.join(extension_manager.extension_names) if extension_manager.extension_names else 'none'
lookup_path = os.path.join(C.DEFAULT_LOCAL_TMP, 'ansiballz_cache') # type: ignore[attr-defined]
cached_module_filename = os.path.join(lookup_path, "%s-%s" % (remote_module_fqn, module_compression))
cached_module_filename = os.path.join(lookup_path, '-'.join((remote_module_fqn, module_compression, extension_key)))
os.makedirs(os.path.dirname(cached_module_filename), exist_ok=True)
zipdata: bytes | None = None
module_metadata: ModuleMetadata | None = None
cached_module: _CachedModule | None = None
# Optimization -- don't lock if the module has already been cached
if os.path.exists(cached_module_filename):
display.debug('ANSIBALLZ: using cached module: %s' % cached_module_filename)
cached_module = _CachedModule.load(cached_module_filename)
zipdata, module_metadata = cached_module.zip_data, cached_module.metadata
else:
display.debug('ANSIBALLZ: Acquiring lock')
lock_path = f'{cached_module_filename}.lock'
@ -1065,24 +1091,31 @@ def _find_module_utils(
zf = zipfile.ZipFile(zipoutput, mode='w', compression=compression_method)
# walk the module imports, looking for module_utils to send- they'll be added to the zipfile
module_metadata = recursive_finder(module_name, remote_module_fqn, Origin(path=module_path).tag(b_module_data), zf, date_time)
module_metadata = recursive_finder(
module_name,
remote_module_fqn,
Origin(path=module_path).tag(b_module_data),
zf,
date_time,
extension_manager,
)
display.debug('ANSIBALLZ: Writing module into payload')
_add_module_to_zip(zf, date_time, remote_module_fqn, b_module_data)
_add_module_to_zip(zf, date_time, remote_module_fqn, b_module_data, module_path, extension_manager)
zf.close()
zipdata = base64.b64encode(zipoutput.getvalue())
zip_data = base64.b64encode(zipoutput.getvalue())
# Write the assembled module to a temp file (write to temp
# so that no one looking for the file reads a partially
# written file)
os.makedirs(lookup_path, exist_ok=True)
display.debug('ANSIBALLZ: Writing module')
cached_module = _CachedModule(zip_data=zipdata, metadata=module_metadata)
cached_module = _CachedModule(zip_data=zip_data, metadata=module_metadata, source_mapping=extension_manager.source_mapping)
cached_module.dump(cached_module_filename)
display.debug('ANSIBALLZ: Done creating module')
if not zipdata:
if not cached_module:
display.debug('ANSIBALLZ: Reading module after lock')
# Another process wrote the file while we were waiting for
# the write lock. Go ahead and read the data from disk
@ -1093,8 +1126,6 @@ def _find_module_utils(
raise AnsibleError('A different worker process failed to create module file. '
'Look at traceback for that process for debugging information.') from ex
zipdata, module_metadata = cached_module.zip_data, cached_module.metadata
o_interpreter, o_args = _extract_interpreter(b_module_data)
if o_interpreter is None:
o_interpreter = u'/usr/bin/python'
@ -1107,40 +1138,36 @@ def _find_module_utils(
if not isinstance(rlimit_nofile, int):
rlimit_nofile = int(templar._engine.template(rlimit_nofile, options=TemplateOptions(value_for_omit=0)))
coverage_config = os.environ.get('_ANSIBLE_COVERAGE_CONFIG')
if coverage_config:
coverage_output = os.environ['_ANSIBLE_COVERAGE_OUTPUT']
else:
coverage_output = None
if not isinstance(module_metadata, ModuleMetadataV1):
if not isinstance(cached_module.metadata, ModuleMetadataV1):
raise NotImplementedError()
params = dict(ANSIBLE_MODULE_ARGS=module_args,)
encoder = get_module_encoder(module_metadata.serialization_profile, Direction.CONTROLLER_TO_MODULE)
encoder = get_module_encoder(cached_module.metadata.serialization_profile, Direction.CONTROLLER_TO_MODULE)
try:
encoded_params = json.dumps(params, cls=encoder)
except TypeError as ex:
raise AnsibleError(f'Failed to serialize arguments for the {module_name!r} module.') from ex
extension_manager.source_mapping = cached_module.source_mapping
code = _get_ansiballz_code(shebang)
args = dict(
zipdata=to_text(zipdata),
ansible_module=module_name,
module_fqn=remote_module_fqn,
params=encoded_params,
profile=module_metadata.serialization_profile,
profile=cached_module.metadata.serialization_profile,
date_time=date_time,
coverage_config=coverage_config,
coverage_output=coverage_output,
rlimit_nofile=rlimit_nofile,
params=encoded_params,
extensions=extension_manager.get_extensions(),
zip_data=to_text(cached_module.zip_data),
)
args_string = '\n'.join(f'{key}={value!r},' for key, value in args.items())
wrapper = f"""{code}
if __name__ == "__main__":
_ansiballz_main(
{args_string}
@ -1149,6 +1176,7 @@ if __name__ == "__main__":
output.write(to_bytes(wrapper))
module_metadata = cached_module.metadata
b_module_data = output.getvalue()
elif module_substyle == 'powershell':

@ -0,0 +1,45 @@
from __future__ import annotations
import atexit
import dataclasses
import importlib.util
import os
import sys
import typing as t
@dataclasses.dataclass(frozen=True)
class Options:
    """Code coverage options."""

    # Path to the coverage configuration file.
    config: str
    # Output file prefix; falsy means "verify availability only", no collection.
    output: str | None


def run(args: dict[str, t.Any]) -> None:  # pragma: nocover
    """Bootstrap `coverage` for the current Ansible module invocation."""
    options = Options(**args)

    if not options.output:
        # Verify coverage is available without importing it.
        # This will detect when a module would fail with coverage enabled with minimal overhead.
        if importlib.util.find_spec('coverage') is None:
            raise RuntimeError('Could not find the `coverage` Python module.')

        return

    # Enable code coverage analysis of the module.
    # This feature is for internal testing and may change without notice.
    version = '.'.join(str(part) for part in sys.version_info[:2])
    os.environ['COVERAGE_FILE'] = f'{options.output}=python-{version}=coverage'

    import coverage

    cov = coverage.Coverage(config_file=options.config)

    @atexit.register
    def atexit_coverage() -> None:
        cov.stop()
        cov.save()

    cov.start()

@ -0,0 +1,62 @@
"""
Remote debugging support for AnsiballZ modules.
To use with PyCharm:
1) Choose an available port for PyCharm to listen on (e.g. 5678).
2) Create a Python Debug Server using that port.
3) Start the Python Debug Server.
4) Ensure the correct version of `pydevd-pycharm` is installed for the interpreter(s) which will run the code being debugged.
5) Configure Ansible with the `_ANSIBALLZ_DEBUGGER_CONFIG` option.
See `Options` below for the structure of the debugger configuration.
Example configuration using an environment variable:
export _ANSIBLE_ANSIBALLZ_DEBUGGER_CONFIG='{"module": "pydevd_pycharm", "settrace": {"host": "localhost", "port": 5678, "suspend": false}}'
6) Set any desired breakpoints.
7) Run Ansible commands.
A similar process should work for other pydevd-based debuggers, such as Visual Studio Code, but they have not been tested.
"""
from __future__ import annotations
import dataclasses
import importlib
import json
import os
import pathlib
import typing as t
@dataclasses.dataclass(frozen=True)
class Options:
    """Debugger options for pydevd and its derivatives."""

    module: str = 'pydevd'
    """The Python module which will be imported and which provides the `settrace` method."""

    settrace: dict[str, object] = dataclasses.field(default_factory=dict)
    """The options to pass to the `{module}.settrace` method."""

    source_mapping: dict[str, str] = dataclasses.field(default_factory=dict)
    """
    A mapping of source paths to provide to pydevd.
    This setting is used internally by AnsiballZ and is not required unless Ansible CLI commands are run from a different system than your IDE.
    In that scenario, use this setting instead of configuring source mapping in your IDE.
    The key is a path known to the IDE.
    The value is the same path as known to the Ansible CLI.
    Both file paths and directories are supported.
    """


def run(args: dict[str, t.Any]) -> None:  # pragma: nocover
    """Enable remote debugging."""
    options = Options(**args)

    # Walk six levels up from this file to reach the extracted payload root.
    # NOTE: a `.parent` walk saturates at the filesystem root, unlike `parents[5]`, which would raise on shallow paths.
    payload_root = pathlib.Path(__file__)

    for _ in range(6):
        payload_root = payload_root.parent

    # pydevd reads this environment variable to map IDE-side paths to payload-side paths.
    mapping = [[ide_path, str(payload_root / payload_path)] for ide_path, payload_path in options.source_mapping.items()]
    os.environ['PATHS_FROM_ECLIPSE_TO_PYTHON'] = json.dumps(mapping)

    debugging_module = importlib.import_module(options.module)
    debugging_module.settrace(**options.settrace)

    pass  # when suspend is True, execution pauses here -- it's also a convenient place to put a breakpoint

@ -5,17 +5,15 @@
from __future__ import annotations
import atexit
import importlib.util
import importlib
import json
import os
import runpy
import sys
import typing as t
from . import _errors, _traceback, _messages
from .. import basic
from ..common.json import get_module_encoder, Direction
from ansible.module_utils import basic
from ansible.module_utils._internal import _errors, _traceback, _messages, _ansiballz
from ansible.module_utils.common.json import get_module_encoder, Direction
def run_module(
@ -24,13 +22,16 @@ def run_module(
profile: str,
module_fqn: str,
modlib_path: str,
extensions: dict[str, dict[str, object]],
init_globals: dict[str, t.Any] | None = None,
coverage_config: str | None = None,
coverage_output: str | None = None,
) -> None: # pragma: nocover
"""Used internally by the AnsiballZ wrapper to run an Ansible module."""
try:
_enable_coverage(coverage_config, coverage_output)
for extension, args in extensions.items():
# importing _ansiballz instead of _extensions avoids an unnecessary import when extensions are not in use
extension_module = importlib.import_module(f'{_ansiballz.__name__}._extensions.{extension}')
extension_module.run(args)
_run_module(
json_params=json_params,
profile=profile,
@ -42,35 +43,6 @@ def run_module(
_handle_exception(ex, profile)
def _enable_coverage(coverage_config: str | None, coverage_output: str | None) -> None: # pragma: nocover
"""Bootstrap `coverage` for the current Ansible module invocation."""
if not coverage_config:
return
if coverage_output:
# Enable code coverage analysis of the module.
# This feature is for internal testing and may change without notice.
python_version_string = '.'.join(str(v) for v in sys.version_info[:2])
os.environ['COVERAGE_FILE'] = f'{coverage_output}=python-{python_version_string}=coverage'
import coverage
cov = coverage.Coverage(config_file=coverage_config)
def atexit_coverage():
cov.stop()
cov.save()
atexit.register(atexit_coverage)
cov.start()
else:
# Verify coverage is available without importing it.
# This will detect when a module would fail with coverage enabled with minimal overhead.
if importlib.util.find_spec('coverage') is None:
raise RuntimeError('Could not find the `coverage` Python module.')
def _run_module(
*,
json_params: bytes,

@ -0,0 +1,32 @@
from __future__ import annotations
import inspect
import sys
from ... import basic
from . import _respawn_wrapper
def create_payload() -> str:
    """Create and return an AnsiballZ payload for respawning a module."""
    # NOTE(review): `_module_fqn` and `_modlib_path` appear to be stashed on `__main__` by the
    # AnsiballZ loader before the module runs -- confirm against `_loader.run_module`.
    main = sys.modules['__main__']
    # Ship the wrapper's source verbatim; it defines `_respawn_main` for the new interpreter.
    code = inspect.getsource(_respawn_wrapper)
    args = dict(
        module_fqn=main._module_fqn,
        modlib_path=main._modlib_path,
        profile=basic._ANSIBLE_PROFILE,
        json_params=basic._ANSIBLE_ARGS,
    )
    # Render each argument as a `name=repr(value),` line for splicing into the call below.
    args_string = '\n'.join(f'{key}={value!r},' for key, value in args.items())
    wrapper = f"""{code}
if __name__ == "__main__":
    _respawn_main(
{args_string}
    )
"""
    return wrapper

@ -0,0 +1,23 @@
from __future__ import annotations
def _respawn_main(
    json_params: bytes,
    profile: str,
    module_fqn: str,
    modlib_path: str,
) -> None:
    """Entry point for a respawned AnsiballZ module: bootstrap the loader and re-run the module."""
    import sys

    # The payload path must be importable before the ansible.module_utils import below can resolve.
    sys.path.insert(0, modlib_path)

    from ansible.module_utils._internal._ansiballz import _loader

    _loader.run_module(
        json_params=json_params,
        profile=profile,
        module_fqn=module_fqn,
        modlib_path=modlib_path,
        extensions={},  # extensions are not re-applied in the respawned process
        init_globals=dict(_respawned=True),  # marks the new __main__ so a second respawn can be rejected
    )

@ -10,6 +10,7 @@ import sys
import typing as t
from ansible.module_utils.common.text.converters import to_bytes
from ansible.module_utils._internal._ansiballz import _respawn
_ANSIBLE_PARENT_PATH = pathlib.Path(__file__).parents[3]
@ -39,7 +40,7 @@ def respawn_module(interpreter_path) -> t.NoReturn:
raise Exception('module has already been respawned')
# FUTURE: we need a safe way to log that a respawn has occurred for forensic/debug purposes
payload = _create_payload()
payload = _respawn.create_payload()
stdin_read, stdin_write = os.pipe()
os.write(stdin_write, to_bytes(payload))
os.close(stdin_write)
@ -59,10 +60,12 @@ def probe_interpreters_for_module(interpreter_paths, module_name):
:arg module_name: fully-qualified Python module name to probe for (for example, ``selinux``)
"""
PYTHONPATH = os.getenv('PYTHONPATH', '')
env = os.environ.copy()
env.update({
'PYTHONPATH': f'{_ANSIBLE_PARENT_PATH}:{PYTHONPATH}'.rstrip(': ')
})
for interpreter_path in interpreter_paths:
if not os.path.exists(interpreter_path):
continue
@ -81,43 +84,3 @@ def probe_interpreters_for_module(interpreter_paths, module_name):
continue
return None
def _create_payload():
# FIXME: move this into _ansiballz and skip the template
from ansible.module_utils import basic
module_fqn = sys.modules['__main__']._module_fqn
modlib_path = sys.modules['__main__']._modlib_path
respawn_code_template = """
if __name__ == '__main__':
import runpy
import sys
json_params = {json_params!r}
profile = {profile!r}
module_fqn = {module_fqn!r}
modlib_path = {modlib_path!r}
sys.path.insert(0, modlib_path)
from ansible.module_utils._internal import _ansiballz
_ansiballz.run_module(
json_params=json_params,
profile=profile,
module_fqn=module_fqn,
modlib_path=modlib_path,
init_globals=dict(_respawned=True),
)
"""
respawn_code = respawn_code_template.format(
json_params=basic._ANSIBLE_ARGS,
profile=basic._ANSIBLE_PROFILE,
module_fqn=module_fqn,
modlib_path=modlib_path,
)
return respawn_code

@ -0,0 +1,3 @@
shippable/posix/group5
context/controller
gather_facts/no

@ -0,0 +1,13 @@
# Exercise the AnsiballZ debugger extension failure path: configuring a debugger
# module that cannot be imported should fail the module itself, not the controller.
- name: Run a module with remote debugging configured to use a bogus debugger module
  ping:
  vars:
    _ansible_ansiballz_debugger_config:
      module: not_a_valid_debugger_module
  register: result
  ignore_errors: yes

- name: Verify the module failed due to not being able to import the bogus debugger module
  assert:
    that:
      - result is failed
      - result.msg is contains "No module named 'not_a_valid_debugger_module'"

@ -3,6 +3,7 @@
from __future__ import annotations
import abc
import json
import os
import shutil
import tempfile
@ -240,9 +241,13 @@ class PosixCoverageHandler(CoverageHandler[PosixConfig]):
# cause the 'coverage' module to be found, but not imported or enabled
coverage_file = ''
coverage_options = dict(
config=config_file,
output=coverage_file,
)
variables = dict(
_ANSIBLE_COVERAGE_CONFIG=config_file,
_ANSIBLE_COVERAGE_OUTPUT=coverage_file,
_ANSIBLE_ANSIBALLZ_COVERAGE_CONFIG=json.dumps(coverage_options),
)
return variables

@ -14,6 +14,7 @@ from io import BytesIO
import ansible.errors
from ansible._internal._ansiballz._builder import ExtensionManager
from ansible.executor.module_common import recursive_finder
from ansible.plugins.loader import init_plugin_loader
@ -27,7 +28,8 @@ MODULE_UTILS_BASIC_FILES = frozenset(('ansible/__init__.py',
'ansible/module_utils/basic.py',
'ansible/module_utils/six/__init__.py',
'ansible/module_utils/_internal/__init__.py',
'ansible/module_utils/_internal/_ansiballz.py',
'ansible/module_utils/_internal/_ansiballz/__init__.py',
'ansible/module_utils/_internal/_ansiballz/_loader.py',
'ansible/module_utils/_internal/_dataclass_validation.py',
'ansible/module_utils/_internal/_datatag/__init__.py',
'ansible/module_utils/_internal/_datatag/_tags.py',
@ -99,7 +101,7 @@ def zip_file() -> zipfile.ZipFile:
def test_no_module_utils(zip_file: zipfile.ZipFile) -> None:
name = 'ping'
data = b'#!/usr/bin/python\nreturn \'{\"changed\": false}\''
recursive_finder(name, os.path.join(ANSIBLE_LIB, 'modules', 'system', 'ping.py'), data, zip_file, NOW)
recursive_finder(name, os.path.join(ANSIBLE_LIB, 'modules', 'system', 'ping.py'), data, zip_file, NOW, ExtensionManager())
assert frozenset(zip_file.namelist()) == MODULE_UTILS_BASIC_FILES
@ -107,7 +109,7 @@ def test_module_utils_with_syntax_error(zip_file: zipfile.ZipFile) -> None:
name = 'fake_module'
data = b'#!/usr/bin/python\ndef something(:\n pass\n'
with pytest.raises(ansible.errors.AnsibleError) as exec_info:
recursive_finder(name, os.path.join(ANSIBLE_LIB, 'modules', 'system', 'fake_module.py'), data, zip_file, NOW)
recursive_finder(name, os.path.join(ANSIBLE_LIB, 'modules', 'system', 'fake_module.py'), data, zip_file, NOW, ExtensionManager())
assert "Unable to compile 'fake_module': invalid syntax" in str(exec_info.value)
@ -115,26 +117,26 @@ def test_module_utils_with_identation_error(zip_file: zipfile.ZipFile) -> None:
name = 'fake_module'
data = b'#!/usr/bin/python\n def something():\n pass\n'
with pytest.raises(ansible.errors.AnsibleError) as exec_info:
recursive_finder(name, os.path.join(ANSIBLE_LIB, 'modules', 'system', 'fake_module.py'), data, zip_file, NOW)
recursive_finder(name, os.path.join(ANSIBLE_LIB, 'modules', 'system', 'fake_module.py'), data, zip_file, NOW, ExtensionManager())
assert "Unable to compile 'fake_module': unexpected indent" in str(exec_info.value)
def test_from_import_six(zip_file: zipfile.ZipFile) -> None:
name = 'ping'
data = b'#!/usr/bin/python\nfrom ansible.module_utils import six'
recursive_finder(name, os.path.join(ANSIBLE_LIB, 'modules', 'system', 'ping.py'), data, zip_file, NOW)
recursive_finder(name, os.path.join(ANSIBLE_LIB, 'modules', 'system', 'ping.py'), data, zip_file, NOW, ExtensionManager())
assert frozenset(zip_file.namelist()) == frozenset(('ansible/module_utils/six/__init__.py', )).union(MODULE_UTILS_BASIC_FILES)
def test_import_six(zip_file: zipfile.ZipFile) -> None:
name = 'ping'
data = b'#!/usr/bin/python\nimport ansible.module_utils.six'
recursive_finder(name, os.path.join(ANSIBLE_LIB, 'modules', 'system', 'ping.py'), data, zip_file, NOW)
recursive_finder(name, os.path.join(ANSIBLE_LIB, 'modules', 'system', 'ping.py'), data, zip_file, NOW, ExtensionManager())
assert frozenset(zip_file.namelist()) == frozenset(('ansible/module_utils/six/__init__.py', )).union(MODULE_UTILS_BASIC_FILES)
def test_import_six_from_many_submodules(zip_file: zipfile.ZipFile) -> None:
name = 'ping'
data = b'#!/usr/bin/python\nfrom ansible.module_utils.six.moves.urllib.parse import urlparse'
recursive_finder(name, os.path.join(ANSIBLE_LIB, 'modules', 'system', 'ping.py'), data, zip_file, NOW)
recursive_finder(name, os.path.join(ANSIBLE_LIB, 'modules', 'system', 'ping.py'), data, zip_file, NOW, ExtensionManager())
assert frozenset(zip_file.namelist()) == frozenset(('ansible/module_utils/six/__init__.py',)).union(MODULE_UTILS_BASIC_FILES)

Loading…
Cancel
Save