mirror of https://github.com/ansible/ansible.git
Omit pre-built man pages from sdist (#81395)
Since man pages aren't accessible to users after a `pip install`, there's no need to include them in the sdist. This change makes it trivial to build man pages from source, which makes them much easier to iterate on. It also simplifies creation and testing of the sdist, since it no longer requires building man pages. The new `packaging/cli-doc/build.py` script can generate both man pages and RST documentation. This supports inclusion on the docs site without a dependency on `ansible-core` internals. Having a single implementation for both simplifies keeping the two formats in sync.pull/81398/head
parent
6d1f85bbe9
commit
691c8e8603
@ -0,0 +1,4 @@
|
||||
minor_changes:
|
||||
- The ``ansible-core`` sdist no longer contains pre-generated man pages.
|
||||
Instead, a ``packaging/cli-doc/build.py`` script is included in the sdist.
|
||||
This script can generate man pages and standalone RST documentation for ``ansible-core`` CLI programs.
|
@ -0,0 +1,279 @@
|
||||
#!/usr/bin/env python
|
||||
# PYTHON_ARGCOMPLETE_OK
|
||||
"""Build documentation for ansible-core CLI programs."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import dataclasses
|
||||
import importlib
|
||||
import inspect
|
||||
import io
|
||||
import itertools
|
||||
import json
|
||||
import pathlib
|
||||
import sys
|
||||
import typing as t
|
||||
import warnings
|
||||
|
||||
import jinja2
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from ansible.cli import CLI # pragma: nocover
|
||||
|
||||
SCRIPT_DIR = pathlib.Path(__file__).resolve().parent
|
||||
SOURCE_DIR = SCRIPT_DIR.parent.parent
|
||||
|
||||
|
||||
def main() -> None:
    """Main program entry point."""
    parser = argparse.ArgumentParser(description=__doc__)
    subparsers = parser.add_subparsers(required=True, metavar='command')

    # The man/rst subcommands share the same options; build them from a table.
    # Each handler's docstring doubles as the subcommand description and help text.
    for command, handler, default_template in (
        ('man', build_man, 'man.j2'),
        ('rst', build_rst, 'rst.j2'),
    ):
        sub_parser = subparsers.add_parser(command, description=handler.__doc__, help=handler.__doc__)
        sub_parser.add_argument('--output-dir', required=True, type=pathlib.Path, metavar='DIR', help='output directory')
        sub_parser.add_argument('--template-file', default=SCRIPT_DIR / default_template, type=pathlib.Path, metavar='FILE', help='template file')
        sub_parser.set_defaults(func=handler)

    json_parser = subparsers.add_parser('json', description=build_json.__doc__, help=build_json.__doc__)
    json_parser.add_argument('--output-file', required=True, type=pathlib.Path, metavar='FILE', help='output file')
    json_parser.set_defaults(func=build_json)

    try:
        # noinspection PyUnresolvedReferences
        import argcomplete  # optional shell completion support
    except ImportError:
        pass
    else:
        argcomplete.autocomplete(parser)

    args = parser.parse_args()
    # Forward only the parsed arguments the selected handler actually declares.
    kwargs = {name: getattr(args, name) for name in inspect.signature(args.func).parameters}

    # Make the in-tree ansible-core importable without requiring installation.
    sys.path.insert(0, str(SOURCE_DIR / 'lib'))

    args.func(**kwargs)
|
||||
|
||||
|
||||
def build_man(output_dir: pathlib.Path, template_file: pathlib.Path) -> None:
    """Build man pages for ansible-core CLI programs."""
    if not template_file.resolve().is_relative_to(SCRIPT_DIR):
        warnings.warn("Custom templates are intended for debugging purposes only. The data model may change in future releases without notice.")

    # Imported lazily so the rst/json subcommands work without docutils installed.
    import docutils.core
    import docutils.writers.manpage

    output_dir.mkdir(exist_ok=True, parents=True)

    rendered_pages = generate_rst(template_file)

    for cli_name, source in rendered_pages.items():
        rst_stream = io.StringIO(source)

        with rst_stream:
            docutils.core.publish_file(
                source=rst_stream,
                destination_path=output_dir / f'{cli_name}.1',
                writer=docutils.writers.manpage.Writer(),
            )
|
||||
|
||||
|
||||
def build_rst(output_dir: pathlib.Path, template_file: pathlib.Path) -> None:
    """Build RST documentation for ansible-core CLI programs."""
    if not template_file.resolve().is_relative_to(SCRIPT_DIR):
        warnings.warn("Custom templates are intended for debugging purposes only. The data model may change in future releases without notice.")

    output_dir.mkdir(exist_ok=True, parents=True)

    # One standalone .rst document per CLI program.
    for cli_name, source in generate_rst(template_file).items():
        destination = output_dir / f'{cli_name}.rst'
        destination.write_text(source)
|
||||
|
||||
|
||||
def build_json(output_file: pathlib.Path) -> None:
    """Build JSON documentation for ansible-core CLI programs."""
    warnings.warn("JSON output is intended for debugging purposes only. The data model may change in future releases without notice.")

    output_file.parent.mkdir(exist_ok=True, parents=True)

    serialized = json.dumps(collect_programs(), indent=4)
    output_file.write_text(serialized)
|
||||
|
||||
|
||||
def generate_rst(template_file: pathlib.Path) -> dict[str, str]:
    """Generate RST pages using the provided template.

    :param template_file: Jinja2 template used to render each CLI program's page.
    :returns: Mapping of CLI program name to its rendered RST source.
    """
    # The environment and compiled template are loop-invariant; previously they
    # were rebuilt for every CLI program. Create them exactly once instead.
    env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_file.parent))
    template = env.get_template(template_file.name)

    return {cli_name: template.render(template_vars) for cli_name, template_vars in collect_programs().items()}
|
||||
|
||||
|
||||
def collect_programs() -> dict[str, dict[str, t.Any]]:
    """Return information about CLI programs."""
    cli_bin_name_list: list[str] = []
    cli_package_dir = SOURCE_DIR / 'lib/ansible/cli'

    # Every module in the ansible.cli package, except the package initializer,
    # corresponds to one CLI program.
    return dict(
        generate_options_docs(source_file, cli_bin_name_list)
        for source_file in cli_package_dir.glob('*.py')
        if source_file.name != '__init__.py'
    )
|
||||
|
||||
|
||||
def generate_options_docs(source_file: pathlib.Path, cli_bin_name_list: list[str]) -> tuple[str, dict[str, t.Any]]:
    """Generate doc structure from CLI module options.

    :param source_file: Path to a module under ``lib/ansible/cli``.
    :param cli_bin_name_list: Shared list of CLI program names; appended to as a side effect.
    :returns: Tuple of the CLI program name and its documentation data.
    """
    import ansible.release

    # The adhoc module is special-cased because its program name is plain `ansible`.
    # Compare the file name rather than a POSIX '/'-joined path suffix, so the
    # check also matches on platforms using a different path separator.
    if source_file.name == 'adhoc.py':
        cli_name = 'ansible'
        cli_class_name = 'AdHocCLI'
        cli_module_fqn = 'ansible.cli.adhoc'
    else:
        cli_module_name = source_file.with_suffix('').name
        cli_name = f'ansible-{cli_module_name}'
        cli_class_name = f'{cli_module_name.capitalize()}CLI'
        cli_module_fqn = f'ansible.cli.{cli_module_name}'

    cli_bin_name_list.append(cli_name)

    cli_module = importlib.import_module(cli_module_fqn)
    cli_class: type[CLI] = getattr(cli_module, cli_class_name)

    # Instantiate the CLI and build its parser so its options can be introspected.
    cli = cli_class([cli_name])
    cli.init_parser()

    parser: argparse.ArgumentParser = cli.parser
    long_desc = cli.__doc__
    arguments: dict[str, str] | None = getattr(cli, 'ARGUMENTS', None)

    action_docs = get_action_docs(parser)
    option_names: tuple[str, ...] = tuple(itertools.chain.from_iterable(opt.options for opt in action_docs))
    actions: dict[str, dict[str, t.Any]] = {}

    content_depth = populate_subparser_actions(parser, option_names, actions)

    docs = dict(
        version=ansible.release.__version__,
        source=str(source_file.relative_to(SOURCE_DIR)),
        cli_name=cli_name,
        usage=parser.format_usage(),
        short_desc=parser.description,
        long_desc=trim_docstring(long_desc),
        actions=actions,
        options=[item.__dict__ for item in action_docs],
        arguments=arguments,
        option_names=option_names,
        cli_bin_name_list=cli_bin_name_list,
        content_depth=content_depth,
        inventory='-i' in option_names,  # program accepts an inventory option
        library='-M' in option_names,  # program accepts a module library path option
    )

    return cli_name, docs
|
||||
|
||||
|
||||
def populate_subparser_actions(parser: argparse.ArgumentParser, shared_option_names: tuple[str, ...], actions: dict[str, dict[str, t.Any]]) -> int:
    """Generate doc structure from CLI module subparser options.

    :param parser: Parser whose subcommands (if any) are documented.
    :param shared_option_names: Options already documented on the parent parser; excluded from subcommands.
    :param actions: Mapping populated in place with one entry per subcommand.
    :returns: Nesting depth of the subcommand tree (1 when there are no subcommands).
    """
    try:
        # noinspection PyProtectedMember
        subparsers: dict[str, argparse.ArgumentParser] = parser._subparsers._group_actions[0].choices  # type: ignore
    except AttributeError:
        subparsers = {}  # parser has no subcommands

    depth = 0

    for subparser_action, subparser in subparsers.items():
        subparser_option_names: set[str] = set()
        subparser_action_docs: set[ActionDoc] = set()
        subparser_actions: dict[str, dict[str, t.Any]] = {}

        for action_doc in get_action_docs(subparser):
            for option_alias in action_doc.options:
                if option_alias in shared_option_names:
                    continue  # already documented on the parent parser

                subparser_option_names.add(option_alias)
                subparser_action_docs.add(action_doc)

        # Track the deepest subcommand, not merely the last one iterated,
        # so the reported depth (used for the RST table of contents) is correct
        # when sibling subcommands have different nesting levels.
        depth = max(depth, populate_subparser_actions(subparser, shared_option_names, subparser_actions))

        actions[subparser_action] = dict(
            option_names=list(subparser_option_names),
            options=[item.__dict__ for item in subparser_action_docs],
            actions=subparser_actions,
            name=subparser_action,
            desc=trim_docstring(subparser.get_default("func").__doc__),
        )

    return depth + 1
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
class ActionDoc:
    """Documentation for an action."""

    # Help text for the action, or None when the action declares none.
    desc: str | None
    # Option strings (flags/aliases), e.g. ('-v', '--verbose'); empty for positionals.
    options: tuple[str, ...]
    # Uppercased dest shown as the option's argument placeholder, or None when the action takes no value.
    arg: str | None
|
||||
|
||||
|
||||
def get_action_docs(parser: argparse.ArgumentParser) -> list[ActionDoc]:
    """Get action documentation from the given argument parser."""
    docs: list[ActionDoc] = []

    # noinspection PyProtectedMember
    for action in parser._actions:
        if action.help == argparse.SUPPRESS:
            continue  # hidden options are excluded from documentation

        # Only store actions accept a value; the uppercased dest is its placeholder.
        # noinspection PyProtectedMember, PyUnresolvedReferences
        arg_placeholder = action.dest.upper() if isinstance(action, argparse._StoreAction) else None

        if not arg_placeholder and not action.option_strings:
            continue  # no value and no option strings: nothing to document

        docs.append(ActionDoc(
            desc=action.help,
            options=tuple(action.option_strings),
            arg=arg_placeholder,
        ))

    return docs
|
||||
|
||||
|
||||
def trim_docstring(docstring: str | None) -> str:
    """Trim and return the given docstring using the implementation from https://peps.python.org/pep-0257/#handling-docstring-indentation."""
    if not docstring:
        return ''  # pragma: nocover

    # Tabs become spaces before measuring indentation, per the normal Python rules.
    lines = docstring.expandtabs().splitlines()

    # The common margin is the smallest indent of any non-blank line after the first.
    non_blank_rest = [line for line in lines[1:] if line.lstrip()]
    margin = min((len(line) - len(line.lstrip()) for line in non_blank_rest), default=sys.maxsize)

    # The first line is special: it is stripped outright.
    trimmed = [lines[0].strip()]

    if margin < sys.maxsize:
        trimmed.extend(line[margin:].rstrip() for line in lines[1:])

    # Drop blank lines from both ends.
    while trimmed and not trimmed[-1]:
        trimmed.pop()

    while trimmed and not trimmed[0]:
        trimmed.pop(0)

    # Return a single string
    return '\n'.join(trimmed)
|
||||
|
||||
|
||||
# Allow the script to be executed directly (it is not importable as a package).
if __name__ == '__main__':
    main()
|
@ -0,0 +1,152 @@
|
||||
{%- set heading = ['-', '+', '#', '*', '^', '"', "'"] -%}
|
||||
{% macro render_action(parent, action, action_docs) %}
|
||||
|
||||
.. program:: {{cli_name}} {{parent + action}}
|
||||
.. _{{cli_name|replace('-','_')}}_{{parent|replace(' ','_')}}{{action}}:
|
||||
|
||||
{{ parent + action }}
|
||||
{{ heading[parent.count(' ')] * (parent + action)|length }}
|
||||
|
||||
{{ (action_docs['desc']|default(' ')) }}
|
||||
|
||||
{% if action_docs['options'] %}
|
||||
|
||||
|
||||
{% for option in action_docs['options']|sort(attribute='options') %}
|
||||
.. option:: {% for switch in option['options'] if switch in action_docs['option_names'] %}{{switch}} {% if option['arg'] %} <{{option['arg']}}>{% endif %}{% if not loop.last %}, {% endif %}{% endfor %}
|
||||
|
||||
{{ (option['desc']) }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{%- set nested_actions = action_docs['actions'] -%}
|
||||
{% if nested_actions %}
|
||||
|
||||
{% for nested_action in nested_actions %}
|
||||
{{ render_action(parent + action + ' ', nested_action, nested_actions[nested_action]) }}
|
||||
|
||||
{% endfor %}
|
||||
{%- endif %}
|
||||
{%- endmacro -%}
|
||||
:source: {{ source }}
|
||||
|
||||
{% set name = cli_name -%}
|
||||
{% set name_slug = cli_name -%}
|
||||
|
||||
.. _{{name}}:
|
||||
|
||||
{% set name_len = name|length + 0-%}
|
||||
{{ '=' * name_len }}
|
||||
{{name}}
|
||||
{{ '=' * name_len }}
|
||||
|
||||
|
||||
:strong:`{{short_desc|default('')}}`
|
||||
|
||||
|
||||
.. contents::
|
||||
:local:
|
||||
:depth: {{content_depth}}
|
||||
|
||||
|
||||
.. program:: {{cli_name}}
|
||||
|
||||
Synopsis
|
||||
========
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
{{ usage|replace('%prog', cli_name) }}
|
||||
|
||||
|
||||
Description
|
||||
===========
|
||||
|
||||
|
||||
{{ long_desc|default('', True) }}
|
||||
|
||||
{% if options %}
|
||||
Common Options
|
||||
==============
|
||||
|
||||
|
||||
{% for option in options|sort(attribute='options') if option.options %}
|
||||
|
||||
.. option:: {% for switch in option['options'] %}{{switch}}{% if option['arg'] %} <{{option['arg']}}>{% endif %}{% if not loop.last %}, {% endif %}{% endfor %}
|
||||
|
||||
{{ option['desc'] }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
{% if arguments %}
|
||||
ARGUMENTS
|
||||
=========
|
||||
|
||||
.. program:: {{cli_name}}
|
||||
|
||||
{% for arg in arguments %}
|
||||
.. option:: {{ arg }}
|
||||
|
||||
{{ (arguments[arg]|default(' '))}}
|
||||
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
{% if actions %}
|
||||
Actions
|
||||
=======
|
||||
|
||||
{% for action in actions %}
|
||||
{{- render_action('', action, actions[action]) }}
|
||||
|
||||
|
||||
|
||||
{% endfor %}
|
||||
.. program:: {{cli_name}}
|
||||
{% endif %}
|
||||
|
||||
Environment
|
||||
===========
|
||||
|
||||
The following environment variables may be specified.
|
||||
|
||||
{% if inventory %}
|
||||
:envvar:`ANSIBLE_INVENTORY` -- Override the default ansible inventory file
|
||||
|
||||
{% endif %}
|
||||
{% if library %}
|
||||
:envvar:`ANSIBLE_LIBRARY` -- Override the default ansible module library path
|
||||
|
||||
{% endif %}
|
||||
:envvar:`ANSIBLE_CONFIG` -- Override the default ansible config file
|
||||
|
||||
Many more are available for most options in ansible.cfg
|
||||
|
||||
|
||||
Files
|
||||
=====
|
||||
|
||||
{% if inventory %}
|
||||
:file:`/etc/ansible/hosts` -- Default inventory file
|
||||
|
||||
{% endif %}
|
||||
:file:`/etc/ansible/ansible.cfg` -- Config file, used if present
|
||||
|
||||
:file:`~/.ansible.cfg` -- User config file, overrides the default config if present
|
||||
|
||||
Author
|
||||
======
|
||||
|
||||
Ansible was originally written by Michael DeHaan.
|
||||
|
||||
See the `AUTHORS` file for a complete list of contributors.
|
||||
|
||||
|
||||
License
|
||||
=======
|
||||
|
||||
Ansible is released under the terms of the GPLv3+ License.
|
||||
|
||||
See also
|
||||
========
|
||||
|
||||
{% for other in cli_bin_name_list|sort %}{% if other != cli_name %}:manpage:`{{other}}(1)`{% if not loop.last %}, {% endif %}{% endif %}{% endfor %}
|
@ -1 +0,0 @@
|
||||
"""PEP 517 build backend for optionally pre-building docs before setuptools."""
|
@ -1,170 +0,0 @@
|
||||
"""PEP 517 build backend wrapper for optionally pre-building docs for sdist."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import typing as t
|
||||
from configparser import ConfigParser
|
||||
from contextlib import contextmanager, suppress
|
||||
from importlib import import_module
|
||||
from io import StringIO
|
||||
from pathlib import Path
|
||||
from shutil import copytree
|
||||
from tempfile import TemporaryDirectory
|
||||
|
||||
try:
    from contextlib import chdir as _chdir_cm
except ImportError:
    # Fallback for Python versions without contextlib.chdir (added in 3.11).
    @contextmanager
    def _chdir_cm(path: os.PathLike) -> t.Iterator[None]:
        """Temporarily change the working directory to ``path``."""
        original_wd = Path.cwd()
        os.chdir(path)
        try:
            yield
        finally:
            # Always restore the original working directory, even on error.
            os.chdir(original_wd)
|
||||
|
||||
from setuptools.build_meta import (
|
||||
build_sdist as _setuptools_build_sdist,
|
||||
get_requires_for_build_sdist as _setuptools_get_requires_for_build_sdist,
|
||||
)
|
||||
|
||||
with suppress(ImportError):
|
||||
# NOTE: Only available for sdist builds that bundle manpages. Declared by
|
||||
# NOTE: `get_requires_for_build_sdist()` when `--build-manpages` is passed.
|
||||
from docutils.core import publish_file
|
||||
from docutils.writers import manpage
|
||||
|
||||
|
||||
__all__ = ( # noqa: WPS317, WPS410
|
||||
'build_sdist', 'get_requires_for_build_sdist',
|
||||
)
|
||||
|
||||
|
||||
BUILD_MANPAGES_CONFIG_SETTING = '--build-manpages'
|
||||
"""Config setting name toggle that is used to request manpage in sdists."""
|
||||
|
||||
|
||||
@contextmanager
def _run_in_temporary_directory() -> t.Iterator[Path]:
    """Create a scratch directory, chdir into it, and yield its path.

    The directory and its contents are removed when the context exits,
    and the previous working directory is restored.
    """
    with TemporaryDirectory(prefix='.tmp-ansible-pep517-') as tmp_dir, _chdir_cm(tmp_dir):
        yield Path(tmp_dir)
|
||||
|
||||
|
||||
def _make_in_tree_ansible_importable() -> None:
|
||||
"""Add the library directory to module lookup paths."""
|
||||
lib_path = str(Path.cwd() / 'lib/')
|
||||
sys.path.insert(0, lib_path) # NOTE: for the current runtime session
|
||||
|
||||
|
||||
def _get_package_distribution_version() -> str:
    """Retrieve the current version number from setuptools config."""
    setup_cfg = ConfigParser()
    setup_cfg.read_string((Path.cwd() / 'setup.cfg').read_text())

    # The version is declared as `attr: some.module.__version__`;
    # resolve it by importing the module and reading the attribute.
    attr_spec = setup_cfg.get('metadata', 'version').removeprefix('attr: ')
    version_mod_str, version_var_str = attr_spec.rsplit('.', 1)

    _make_in_tree_ansible_importable()

    return getattr(import_module(version_mod_str), version_var_str)
|
||||
|
||||
|
||||
def _generate_rst_in_templates() -> t.Iterable[Path]:
    """Create ``*.1.rst.in`` files out of CLI Python modules."""
    # Run the generator in a subprocess so its sys.path changes stay isolated.
    command = [
        str(sys.executable),
        str(Path(__file__).parent / '_generate_man.py'),
        '--output-dir=docs/man/man1/',
        '--output-format=man',
    ]
    command.extend(str(module_path) for module_path in Path('lib/ansible/cli/').glob('*.py'))

    subprocess.check_call(command)

    return Path('docs/man/man1/').glob('*.1.rst.in')
|
||||
|
||||
|
||||
def _convert_rst_in_template_to_manpage(
        rst_doc_template: str,
        destination_path: os.PathLike,
        version_number: str,
) -> None:
    """Render pre-made ``*.1.rst.in`` templates into manpages.

    This includes pasting the hardcoded version into the resulting files.
    The resulting ``in``-files are wiped in the process.
    """
    # Substitute the release version before handing the document to docutils.
    rendered_doc = rst_doc_template.replace('%VERSION%', version_number)

    rst_stream = StringIO(rendered_doc)
    with rst_stream:
        publish_file(
            source=rst_stream,
            destination_path=destination_path,
            writer=manpage.Writer(),
        )
|
||||
|
||||
|
||||
def build_sdist(  # noqa: WPS210, WPS430
    sdist_directory: os.PathLike,
    config_settings: dict[str, str] | None = None,
) -> str:
    """PEP 517 hook: build an sdist, optionally bundling pre-built manpages.

    :param sdist_directory: Directory where the sdist archive is written.
    :param config_settings: PEP 517 config settings; manpages are built when
        ``--build-manpages`` is present.
    :returns: Base name of the built sdist archive.
    """
    build_manpages_requested = BUILD_MANPAGES_CONFIG_SETTING in (
        config_settings or {}
    )
    original_src_dir = Path.cwd().resolve()
    # Work on a throwaway copy of the source tree so the checkout stays pristine.
    with _run_in_temporary_directory() as tmp_dir:
        tmp_src_dir = Path(tmp_dir) / 'src'
        copytree(original_src_dir, tmp_src_dir, symlinks=True)
        os.chdir(tmp_src_dir)

        if build_manpages_requested:
            Path('docs/man/man1/').mkdir(exist_ok=True, parents=True)
            version_number = _get_package_distribution_version()
            # Render each generated *.1.rst.in template into a manpage,
            # then delete the intermediate template file.
            for rst_in in _generate_rst_in_templates():
                _convert_rst_in_template_to_manpage(
                    rst_doc_template=rst_in.read_text(),
                    destination_path=rst_in.with_suffix('').with_suffix(''),
                    version_number=version_number,
                )
                rst_in.unlink()

        # Rewrite pyproject.toml in the temporary copy so the resulting sdist
        # builds with plain setuptools.build_meta instead of this wrapper backend.
        Path('pyproject.toml').write_text(
            re.sub(
                r"""(?x)
                backend-path\s=\s\[ # value is a list of double-quoted strings
                [^]]+
                ].*\n
                build-backend\s=\s"[^"]+".*\n # value is double-quoted
                """,
                'build-backend = "setuptools.build_meta"\n',
                Path('pyproject.toml').read_text(),
            )
        )

        built_sdist_basename = _setuptools_build_sdist(
            sdist_directory=sdist_directory,
            config_settings=config_settings,
        )

    return built_sdist_basename
|
||||
|
||||
|
||||
def get_requires_for_build_sdist(
        config_settings: dict[str, str] | None = None,
) -> list[str]:
    """PEP 517 hook: report sdist build requirements, including manpage deps."""
    build_manpages_requested = BUILD_MANPAGES_CONFIG_SETTING in (
        config_settings or {}
    )
    build_manpages_requested = True  # FIXME: Once pypa/build#559 is addressed.

    if build_manpages_requested:
        manpage_build_deps = [
            'docutils',  # provides `rst2man`
            'jinja2',  # used to generate man pages
            'pyyaml',  # needed for importing in-tree `ansible-core` from `lib/`
        ]
    else:
        manpage_build_deps = []

    base_requirements = _setuptools_get_requires_for_build_sdist(
        config_settings=config_settings,
    )

    return base_requirements + manpage_build_deps
|
@ -1,312 +0,0 @@
|
||||
# coding: utf-8
|
||||
# Copyright: (c) 2019, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
"""Generate cli documentation from cli docstrings."""
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
import argparse
|
||||
import os.path
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
|
||||
|
||||
DEFAULT_TEMPLATE_FILE = pathlib.Path(__file__).parent / '_templates/man.j2'
|
||||
|
||||
|
||||
# from https://www.python.org/dev/peps/pep-0257/
def trim_docstring(docstring):
    """Normalize docstring indentation per the PEP 257 algorithm."""
    if not docstring:
        return ''

    # Expand tabs, then split into individual lines.
    lines = docstring.expandtabs().splitlines()

    # Smallest indent across non-blank lines after the first.
    margin = sys.maxsize
    for line in lines[1:]:
        content = line.lstrip()
        if content:
            margin = min(margin, len(line) - len(content))

    # The first line is stripped as-is; the rest lose the common margin.
    cleaned = [lines[0].strip()]
    if margin < sys.maxsize:
        cleaned.extend(line[margin:].rstrip() for line in lines[1:])

    # Trim blank lines at both ends.
    while cleaned and not cleaned[-1]:
        cleaned.pop()
    while cleaned and not cleaned[0]:
        cleaned.pop(0)

    return '\n'.join(cleaned)
|
||||
|
||||
|
||||
def get_options(optlist):
    """Return documentation dicts for the given argparse actions."""
    documented = []

    for action in optlist:
        if action.help == argparse.SUPPRESS:
            continue  # hidden option

        entry = {
            'desc': action.help,
            'options': action.option_strings
        }

        if isinstance(action, argparse._StoreAction):
            # Store actions take a value; expose its uppercased dest as the placeholder.
            entry['arg'] = action.dest.upper()
        elif not entry['options']:
            continue  # no value and no option strings: nothing to document

        documented.append(entry)

    return documented
|
||||
|
||||
|
||||
def dedupe_groups(parser):
|
||||
action_groups = []
|
||||
for action_group in parser._action_groups:
|
||||
found = False
|
||||
for a in action_groups:
|
||||
if a._actions == action_group._actions:
|
||||
found = True
|
||||
break
|
||||
if not found:
|
||||
action_groups.append(action_group)
|
||||
return action_groups
|
||||
|
||||
|
||||
def get_option_groups(option_parser):
    """Return description/action info for each deduplicated group after the first."""
    return [
        {
            'desc': group.description,
            'options': group._actions,
            'group_obj': group,
        }
        for group in dedupe_groups(option_parser)[1:]
    ]
|
||||
|
||||
|
||||
def opt_doc_list(parser):
    """Collect option docs from each non-default action group, then the parser itself."""
    collected = []

    # Skip the first (default) group, mirroring get_option_groups().
    for group in dedupe_groups(parser)[1:]:
        collected.extend(get_options(group._actions))

    collected.extend(get_options(parser._actions))

    return collected
|
||||
|
||||
|
||||
# def opts_docs(cli, name):
def opts_docs(cli_class_name, cli_module_name):
    ''' generate doc structure from options '''

    # The adhoc module maps to the bare `ansible` program name.
    cli_name = 'ansible-%s' % cli_module_name
    if cli_module_name == 'adhoc':
        cli_name = 'ansible'

    # With no action/subcommand
    # shared opts set
    # instantiate each cli and ask its options
    cli_klass = getattr(__import__("ansible.cli.%s" % cli_module_name,
                                   fromlist=[cli_class_name]), cli_class_name)
    cli = cli_klass([cli_name])

    # parse the common options
    try:
        cli.init_parser()
    except Exception:
        # best-effort: continue with whatever the parser managed to build
        pass

    # base/common cli info
    cli_options = opt_doc_list(cli.parser)
    docs = {
        'cli': cli_module_name,
        'cli_name': cli_name,
        'usage': cli.parser.format_usage(),
        'short_desc': cli.parser.description,
        'long_desc': trim_docstring(cli.__doc__),
        'actions': {},
        'content_depth': 2,
        'options': cli_options,
        'arguments': getattr(cli, 'ARGUMENTS', None),
    }
    option_info = {'option_names': [],
                   'options': cli_options,
                   'groups': []}

    groups_info = get_option_groups(cli.parser)
    # Flatten every option alias shared by the top-level parser; subcommand
    # docs below exclude these so only subcommand-specific options are listed.
    shared_opt_names = []
    for opt in cli_options:
        shared_opt_names.extend(opt.get('options', []))

    option_info['option_names'] = shared_opt_names

    option_info['groups'].extend(groups_info)

    docs.update(option_info)

    # now for each action/subcommand
    # force populate parser with per action options

    def get_actions(parser, docs):
        # Recursively document each subcommand; returns the nesting depth.
        # use class attrs not the attrs on a instance (not that it matters here...)
        try:
            subparser = parser._subparsers._group_actions[0].choices
        except AttributeError:
            # parser has no subcommands
            subparser = {}

        depth = 0

        for action, parser in subparser.items():
            action_info = {'option_names': [],
                           'options': [],
                           'actions': {}}
            # docs['actions'][action] = {}
            # docs['actions'][action]['name'] = action
            action_info['name'] = action
            action_info['desc'] = trim_docstring(parser.get_default("func").__doc__)

            # docs['actions'][action]['desc'] = getattr(cli, 'execute_%s' % action).__doc__.strip()
            action_doc_list = opt_doc_list(parser)

            # Collect only the options not already documented at the top level.
            uncommon_options = []
            for action_doc in action_doc_list:
                # uncommon_options = []

                option_aliases = action_doc.get('options', [])
                for option_alias in option_aliases:

                    if option_alias in shared_opt_names:
                        continue

                    # TODO: use set
                    if option_alias not in action_info['option_names']:
                        action_info['option_names'].append(option_alias)

                    if action_doc in action_info['options']:
                        continue

                    uncommon_options.append(action_doc)

                action_info['options'] = uncommon_options

            depth = 1 + get_actions(parser, action_info)

            docs['actions'][action] = action_info

        return depth

    action_depth = get_actions(cli.parser, docs)
    docs['content_depth'] = action_depth + 1

    return docs
|
||||
|
||||
|
||||
class GenerateMan:
    """Command that renders man/rst docs for ansible CLI modules via a Jinja2 template."""

    # Command identifier (historical; this module is run standalone via main()).
    name = 'generate-man'

    @classmethod
    def init_parser(cls, parser: argparse.ArgumentParser):
        """Register this command's options on the given argument parser."""
        parser.add_argument("-t", "--template-file", action="store", dest="template_file",
                            default=DEFAULT_TEMPLATE_FILE, help="path to jinja2 template")
        parser.add_argument("-o", "--output-dir", action="store", dest="output_dir",
                            default='/tmp/', help="Output directory for rst files")
        parser.add_argument("-f", "--output-format", action="store", dest="output_format",
                            default='man',
                            help="Output format for docs (the default 'man' or 'rst')")
        parser.add_argument('cli_modules', help='CLI module name(s)', metavar='MODULE_NAME', nargs='*')

    @staticmethod
    def main(args):
        """Generate one documentation file per requested CLI module.

        Reads options from ``args`` (see init_parser), introspects each CLI
        module via opts_docs(), and writes rendered template output into the
        requested output directory.
        """
        template_file = args.template_file
        template_path = os.path.expanduser(template_file)
        template_dir = os.path.abspath(os.path.dirname(template_path))
        template_basename = os.path.basename(template_file)

        output_dir = os.path.abspath(args.output_dir)
        output_format = args.output_format

        cli_modules = args.cli_modules

        # various cli parsing things checks sys.argv if the 'args' that are passed in are []
        # so just remove any args so the cli modules dont try to parse them resulting in warnings
        sys.argv = [sys.argv[0]]

        allvars = {}
        output = {}
        cli_list = []
        cli_bin_name_list = []

        # for binary in os.listdir('../../lib/ansible/cli'):
        for cli_module_name in cli_modules:
            binary = os.path.basename(os.path.expanduser(cli_module_name))

            # Only real CLI module sources are processed.
            if not binary.endswith('.py'):
                continue
            elif binary == '__init__.py':
                continue

            cli_name = os.path.splitext(binary)[0]

            # The adhoc module maps to the bare `ansible` program name.
            if cli_name == 'adhoc':
                cli_class_name = 'AdHocCLI'
                # myclass = 'AdHocCLI'
                output[cli_name] = 'ansible.1.rst.in'
                cli_bin_name = 'ansible'
            else:
                # myclass = "%sCLI" % libname.capitalize()
                cli_class_name = "%sCLI" % cli_name.capitalize()
                output[cli_name] = 'ansible-%s.1.rst.in' % cli_name
                cli_bin_name = 'ansible-%s' % cli_name

            # FIXME:
            allvars[cli_name] = opts_docs(cli_class_name, cli_name)
            cli_bin_name_list.append(cli_bin_name)

        cli_list = allvars.keys()

        # Output filename pattern per supported format.
        doc_name_formats = {'man': '%s.1.rst.in',
                            'rst': '%s.rst'}

        for cli_name in cli_list:

            # template it!
            env = Environment(loader=FileSystemLoader(template_dir))
            template = env.get_template(template_basename)

            # add rest to vars
            tvars = allvars[cli_name]
            tvars['cli_bin_name_list'] = cli_bin_name_list
            tvars['cli'] = cli_name
            # '-i'/'-M' support determines which env vars/files sections render.
            if '-i' in tvars['option_names']:
                tvars['inventory'] = True
                print('uses inventory')
            if '-M' in tvars['option_names']:
                tvars['library'] = True
                print('uses library')

            manpage = template.render(tvars)
            filename = os.path.join(output_dir, doc_name_formats[output_format] % tvars['cli_name'])
            pathlib.Path(filename).write_text(manpage)
|
||||
|
||||
|
||||
def main() -> None:
    """Parse arguments, make the in-tree ansible importable, and generate docs."""
    parser = argparse.ArgumentParser(description=__doc__)
    GenerateMan.init_parser(parser)

    args = parser.parse_args()

    # Allow `import ansible` from the source tree without installation.
    repo_lib_dir = pathlib.Path(__file__).parent.parent.parent / 'lib'
    sys.path.insert(0, str(repo_lib_dir))

    GenerateMan.main(args)
|
||||
|
||||
|
||||
# Script entry point when executed directly.
if __name__ == '__main__':
    main()
|
@ -1,9 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""PEP 517 build backend for optionally pre-building docs before setuptools."""
|
||||
|
||||
from setuptools.build_meta import * # Re-exporting PEP 517 hooks # pylint: disable=unused-wildcard-import,wildcard-import
|
||||
|
||||
from ._backend import ( # noqa: WPS436 # Re-exporting PEP 517 hooks
|
||||
build_sdist, get_requires_for_build_sdist,
|
||||
)
|
@ -1,4 +1,3 @@
|
||||
[build-system]
|
||||
requires = ["setuptools >= 66.1.0"] # minimum setuptools version supporting Python 3.12
|
||||
backend-path = ["packaging"] # requires 'Pip>=20' or 'pep517>=0.6.0'
|
||||
build-backend = "pep517_backend.hooks" # wraps `setuptools.build_meta`
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
@ -1,3 +0,0 @@
|
||||
shippable/posix/group3
|
||||
context/controller
|
||||
packaging
|
@ -1,11 +0,0 @@
|
||||
setuptools == 66.1.0 # minimum requirement in pyproject.toml
|
||||
|
||||
|
||||
# An arbitrary old version that was released before Python 3.10.0:
|
||||
wheel == 0.33.6
|
||||
|
||||
# Conditional dependencies:
|
||||
docutils == 0.16
|
||||
Jinja2 == 3.0.0
|
||||
MarkupSafe == 2.0.0
|
||||
PyYAML == 5.3
|
@ -1,10 +0,0 @@
|
||||
setuptools == 68.0.0 # latest release as of this commit
|
||||
|
||||
# Wheel-only build dependency
|
||||
wheel == 0.38.4
|
||||
|
||||
# Conditional dependencies:
|
||||
docutils == 0.19
|
||||
Jinja2 == 3.1.2
|
||||
MarkupSafe == 2.1.2
|
||||
PyYAML == 6.0.1
|
@ -1,31 +0,0 @@
|
||||
#!/usr/bin/env bash
# Run the PEP 517 build backend smoke tests under pytest in a fresh virtualenv.

if [[ "${ANSIBLE_DEBUG}" == true ]] # `ansible-test` invoked with `--debug`
then
    PYTEST_VERY_VERBOSE_FLAG=-vvvvv
    SET_DEBUG_MODE=-x
else
    ANSIBLE_DEBUG=false
    PYTEST_VERY_VERBOSE_FLAG=
    SET_DEBUG_MODE=+x
fi

set -eEuo pipefail

source virtualenv.sh

# Mirror the requested debug setting in the shell's own trace mode.
set "${SET_DEBUG_MODE}"

# Silence pip warnings that would add noise to the pytest output.
export PIP_DISABLE_PIP_VERSION_CHECK=true
export PIP_NO_PYTHON_VERSION_WARNING=true
export PIP_NO_WARN_SCRIPT_LOCATION=true

python -Im pip install 'pytest ~= 7.2.0'
python -Im pytest ${PYTEST_VERY_VERBOSE_FLAG} \
    --basetemp="${OUTPUT_DIR}/pytest-tmp" \
    --color=yes \
    --showlocals \
    -p no:forked \
    -p no:mock \
    -ra
|
@ -1,361 +0,0 @@
|
||||
"""Smoke tests for the in-tree PEP 517 backend."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from filecmp import dircmp
|
||||
from os import chdir, environ, PathLike
|
||||
from pathlib import Path
|
||||
from shutil import rmtree
|
||||
from subprocess import check_call, check_output, PIPE
|
||||
from sys import executable as current_interpreter, version_info
|
||||
from tarfile import TarFile
|
||||
import typing as t
|
||||
|
||||
# `contextlib.chdir` is only available on Python 3.11+; provide an
# equivalent fallback context manager for older interpreters.
try:
    from contextlib import chdir as _chdir_cm
except ImportError:
    from contextlib import contextmanager as _contextmanager

    @_contextmanager
    def _chdir_cm(path: PathLike) -> t.Iterator[None]:
        """Temporarily change the working directory, restoring it on exit."""
        original_wd = Path.cwd()
        chdir(path)
        try:
            yield
        finally:
            chdir(original_wd)

import pytest


# Distribution naming: the project name on disk vs. the normalized wheel name.
DIST_NAME = 'ansible_core'
DIST_FILENAME_BASE = 'ansible-core'
OUTPUT_DIR = Path(environ['OUTPUT_DIR']).resolve().absolute()
# NOTE(review): assumes OUTPUT_DIR sits three levels below the source root —
# confirm against the ansible-test layout if paths change.
SRC_ROOT_DIR = OUTPUT_DIR.parents[3]
GENERATED_MANPAGES_SUBDIR = SRC_ROOT_DIR / 'docs' / 'man' / 'man1'
# Pinned build-dependency constraint files used to exercise both the oldest
# and a modern supported build toolchain.
LOWEST_SUPPORTED_BUILD_DEPS_FILE = (
    Path(__file__).parent / 'minimum-build-constraints.txt'
).resolve().absolute()
MODERNISH_BUILD_DEPS_FILE = (
    Path(__file__).parent / 'modernish-build-constraints.txt'
).resolve().absolute()
RELEASE_MODULE = SRC_ROOT_DIR / 'lib' / 'ansible' / 'release.py'
VERSION_LINE_PREFIX = "__version__ = '"
# Extract the version string from release.py without importing it.
PKG_DIST_VERSION = next(
    line[len(VERSION_LINE_PREFIX):-1]
    for line in RELEASE_MODULE.read_text().splitlines()
    if line.startswith(VERSION_LINE_PREFIX)
)
EXPECTED_SDIST_NAME_BASE = f'{DIST_FILENAME_BASE}-{PKG_DIST_VERSION}'
EXPECTED_SDIST_NAME = f'{EXPECTED_SDIST_NAME_BASE}.tar.gz'
EXPECTED_WHEEL_NAME = f'{DIST_NAME}-{PKG_DIST_VERSION}-py3-none-any.whl'

IS_PYTHON310_PLUS = version_info[:2] >= (3, 10)
|
||||
|
||||
|
||||
def wipe_generated_manpages() -> None:
    """Delete previously generated man1 pages from the source checkout, if any."""
    # The man pages are gitignored build artifacts; a missing directory
    # simply means there is nothing to clean up.
    if GENERATED_MANPAGES_SUBDIR.exists():
        rmtree(GENERATED_MANPAGES_SUBDIR)
|
||||
|
||||
|
||||
def contains_man1_pages(sdist_tarball: Path) -> bool:
    """Return ``True`` if the man1 page directory is present in the given sdist.

    :param sdist_tarball: Path to a gzipped sdist tarball.
    :returns: Whether ``docs/man/man1`` exists inside the archive.
    """
    # FIX: the return annotation previously claimed ``Path`` while the
    # function actually returns a bool.
    with sdist_tarball.open(mode='rb') as tarball_fd:
        with TarFile.gzopen(fileobj=tarball_fd, name=None) as tarball:
            try:
                tarball.getmember(
                    name=f'{EXPECTED_SDIST_NAME_BASE}/docs/man/man1',
                )
            except KeyError:
                # getmember() raises KeyError when the member is absent.
                return False

    return True
|
||||
|
||||
|
||||
def unpack_sdist(sdist_tarball: Path, target_directory: Path) -> Path:
    """Extract the given sdist tarball into *target_directory*.

    :returns: Path of the unpacked package source checkout.
    """
    with sdist_tarball.open(mode='rb') as tarball_fd, TarFile.gzopen(fileobj=tarball_fd, name=None) as tarball:
        tarball.extractall(path=target_directory)
    return target_directory / EXPECTED_SDIST_NAME_BASE
|
||||
|
||||
|
||||
def assert_dirs_equal(*dir_paths: Path) -> None:
    """Assert that two directory trees have identical contents.

    :param dir_paths: Exactly two directory paths to compare
        (``dircmp`` accepts exactly two positional directories).
    :raises AssertionError: if either side has extra, differing,
        or uncomparable files.
    """
    # FIX: a ``*args`` annotation names the element type, so this is
    # ``Path``, not ``t.List[Path]``.
    dir_comparison = dircmp(*dir_paths)
    assert not dir_comparison.left_only
    assert not dir_comparison.right_only
    assert not dir_comparison.diff_files
    assert not dir_comparison.funny_files
|
||||
|
||||
|
||||
@pytest.fixture
def venv_python_exe(tmp_path: Path) -> t.Iterator[Path]:
    """Yield the interpreter path of a freshly created throwaway venv.

    The venv lives under pytest's per-test temporary directory and is
    removed again after the test using this fixture finishes.
    """
    venv_path = tmp_path / 'pytest-managed-venv'
    mkvenv_cmd = (
        current_interpreter, '-m', 'venv', str(venv_path),
    )
    # env={} isolates the venv creation from the caller's environment;
    # stdout/stderr are captured so the creation is silent.
    check_call(mkvenv_cmd, env={}, stderr=PIPE, stdout=PIPE)
    yield venv_path / 'bin' / 'python'
    # Teardown: drop the whole venv tree.
    rmtree(venv_path)
|
||||
|
||||
|
||||
def run_with_venv_python(
        python_exe: Path, *cli_args: str,
        env_vars: t.Optional[t.Dict[str, str]] = None,
) -> bytes:
    """Run a command under the given venv interpreter and return its stdout.

    :param python_exe: Path to the venv's ``python`` executable.
    :param cli_args: Arguments passed to the interpreter.
    :param env_vars: Environment for the subprocess; empty by default so the
        invocation stays isolated from the caller's environment.
    :returns: The raw captured standard output of the process.

    FIXES: ``env_vars`` defaulted to ``None`` but was annotated non-Optional;
    ``*cli_args`` is annotated with its element type (``str``); the return is
    ``bytes`` since ``check_output`` is called without ``text=True``.
    """
    if env_vars is None:
        env_vars = {}
    full_cmd = str(python_exe), *cli_args
    # stderr is captured (and discarded) to keep pytest output clean;
    # a non-zero exit status still raises CalledProcessError.
    return check_output(full_cmd, env=env_vars, stderr=PIPE)
|
||||
|
||||
|
||||
def build_dists(
        python_exe: Path, *cli_args: t.Iterable[str],
        env_vars: t.Dict[str, str],
) -> str:
    """Invoke PyPA ``build`` in the venv, forwarding *cli_args* verbatim."""
    build_cmd = ('-m', 'build', *cli_args)
    return run_with_venv_python(python_exe, *build_cmd, env_vars=env_vars)
|
||||
|
||||
|
||||
def pip_install(
        python_exe: Path, *cli_args: t.Iterable[str],
        env_vars: t.Dict[str, str] = None,
) -> str:
    """Run ``pip install`` in the venv, forwarding *cli_args* verbatim."""
    install_cmd = ('-m', 'pip', 'install', *cli_args)
    return run_with_venv_python(python_exe, *install_cmd, env_vars=env_vars)
|
||||
|
||||
|
||||
def test_installing_sdist_build_with_modern_deps_to_old_env(
        venv_python_exe: Path, tmp_path: Path,
) -> None:
    """Smoke-test installing a modern-tools-built sdist with old pip versions."""
    pip_install(venv_python_exe, 'build ~= 0.10.0')
    tmp_dir_sdist_w_modern_tools = tmp_path / 'sdist-w-modern-tools'
    build_dists(
        venv_python_exe, '--sdist',
        '--config-setting=--build-manpages',
        f'--outdir={tmp_dir_sdist_w_modern_tools!s}',
        str(SRC_ROOT_DIR),
        env_vars={
            'PIP_CONSTRAINT': str(MODERNISH_BUILD_DEPS_FILE),
        },
    )
    tmp_path_sdist_w_modern_tools = (
        tmp_dir_sdist_w_modern_tools / EXPECTED_SDIST_NAME
    )

    # Downgrading pip, because v20+ supports in-tree build backends
    pip_install(venv_python_exe, 'pip ~= 19.3.1')

    # Smoke test — installing an sdist with pip that does not support
    # in-tree build backends.
    pip_install(
        venv_python_exe, str(tmp_path_sdist_w_modern_tools), '--no-deps',
    )

    # Downgrading pip, because versions that support PEP 517 don't allow
    # disabling it with `--no-use-pep517` when `build-backend` is set in
    # the `[build-system]` section of `pyproject.toml`, considering this
    # an explicit opt-in.
    if not IS_PYTHON310_PLUS:
        pip_install(venv_python_exe, 'pip == 18.0')

    # Smoke test — installing an sdist with pip that does not support invoking
    # PEP 517 interface at all.
    # In this scenario, pip will run `setup.py install` since `wheel` is not in
    # the environment.
    if IS_PYTHON310_PLUS:
        # pip 18 cannot run on 3.10+, so exercise `setup.py sdist` directly
        # against an unpacked copy of the sdist instead.
        tmp_dir_unpacked_sdist_root = tmp_path / 'unpacked-sdist'
        tmp_dir_unpacked_sdist_path = tmp_dir_unpacked_sdist_root / EXPECTED_SDIST_NAME_BASE
        with TarFile.gzopen(tmp_path_sdist_w_modern_tools) as sdist_fd:
            sdist_fd.extractall(path=tmp_dir_unpacked_sdist_root)

        pip_install(
            venv_python_exe, 'setuptools',
            env_vars={
                'PIP_CONSTRAINT': str(LOWEST_SUPPORTED_BUILD_DEPS_FILE),
            },
        )
        with _chdir_cm(tmp_dir_unpacked_sdist_path):
            run_with_venv_python(
                venv_python_exe, 'setup.py', 'sdist',
                env_vars={'PATH': environ['PATH']},
            )
    else:
        pip_install(
            venv_python_exe, str(tmp_path_sdist_w_modern_tools), '--no-deps',
            env_vars={
                'PIP_CONSTRAINT': str(LOWEST_SUPPORTED_BUILD_DEPS_FILE),
            },
        )

    # Smoke test — installing an sdist with pip that does not support invoking
    # PEP 517 interface at all.
    # With `wheel` present, pip will run `setup.py bdist_wheel` and then,
    # unpack the result.
    pip_install(venv_python_exe, 'wheel')
    if IS_PYTHON310_PLUS:
        with _chdir_cm(tmp_dir_unpacked_sdist_path):
            run_with_venv_python(
                venv_python_exe, 'setup.py', 'bdist_wheel',
                env_vars={'PATH': environ['PATH']},
            )
    else:
        pip_install(
            venv_python_exe, str(tmp_path_sdist_w_modern_tools), '--no-deps',
        )
|
||||
|
||||
|
||||
def test_dist_rebuilds_with_manpages_premutations(
        venv_python_exe: Path, tmp_path: Path,
) -> None:
    """Test a series of sdist rebuilds under different conditions.

    This check builds sdists right from the Git checkout with and without
    the manpages. It also does this using different versions of the setuptools
    PEP 517 build backend being pinned. Finally, it builds a wheel out of one
    of the rebuilt sdists.
    As intermediate assertions, this test makes simple smoke tests along
    the way.
    """
    pip_install(venv_python_exe, 'build ~= 0.10.0')

    # Test building an sdist without manpages from the Git checkout
    tmp_dir_sdist_without_manpages = tmp_path / 'sdist-without-manpages'
    wipe_generated_manpages()
    build_dists(
        venv_python_exe, '--sdist',
        f'--outdir={tmp_dir_sdist_without_manpages!s}',
        str(SRC_ROOT_DIR),
        env_vars={
            'PIP_CONSTRAINT': str(MODERNISH_BUILD_DEPS_FILE),
        },
    )
    tmp_path_sdist_without_manpages = (
        tmp_dir_sdist_without_manpages / EXPECTED_SDIST_NAME
    )
    assert tmp_path_sdist_without_manpages.exists()
    assert not contains_man1_pages(tmp_path_sdist_without_manpages)
    sdist_without_manpages_path = unpack_sdist(
        tmp_path_sdist_without_manpages,
        tmp_dir_sdist_without_manpages / 'src',
    )

    # Test building an sdist with manpages from the Git checkout
    # and lowest supported build deps
    wipe_generated_manpages()
    tmp_dir_sdist_with_manpages = tmp_path / 'sdist-with-manpages'
    build_dists(
        venv_python_exe, '--sdist',
        '--config-setting=--build-manpages',
        f'--outdir={tmp_dir_sdist_with_manpages!s}',
        str(SRC_ROOT_DIR),
        env_vars={
            'PIP_CONSTRAINT': str(LOWEST_SUPPORTED_BUILD_DEPS_FILE),
        },
    )
    tmp_path_sdist_with_manpages = (
        tmp_dir_sdist_with_manpages / EXPECTED_SDIST_NAME
    )
    assert tmp_path_sdist_with_manpages.exists()
    assert contains_man1_pages(tmp_path_sdist_with_manpages)
    sdist_with_manpages_path = unpack_sdist(
        tmp_path_sdist_with_manpages,
        tmp_dir_sdist_with_manpages / 'src',
    )

    # Test re-building an sdist with manpages from the
    # sdist contents that does not include the manpages
    tmp_dir_rebuilt_sdist = tmp_path / 'rebuilt-sdist'
    build_dists(
        venv_python_exe, '--sdist',
        '--config-setting=--build-manpages',
        f'--outdir={tmp_dir_rebuilt_sdist!s}',
        str(sdist_without_manpages_path),
        env_vars={
            'PIP_CONSTRAINT': str(MODERNISH_BUILD_DEPS_FILE),
        },
    )
    tmp_path_rebuilt_sdist = tmp_dir_rebuilt_sdist / EXPECTED_SDIST_NAME
    # Checking that the expected sdist got created
    # from the previous unpacked sdist...
    assert tmp_path_rebuilt_sdist.exists()
    # NOTE: The following assertion is disabled due to the fact that, when
    # NOTE: building an sdist from the original source checkout, the build
    # NOTE: backend replaces itself with pure setuptools in the resulting
    # NOTE: sdist, and the following rebuilds from that sdist are no longer
    # NOTE: able to process the custom config settings that are implemented in
    # NOTE: the in-tree build backend. It is expected that said
    # NOTE: `pyproject.toml` mutation change will be reverted once all of the
    # NOTE: supported `ansible-core` versions ship wheels, meaning that the
    # NOTE: end-users won't be building the distribution from sdist on install.
    # NOTE: Another case, when it can be reverted is declaring pip below v20
    # NOTE: unsupported — it is the first version to support in-tree build
    # NOTE: backends natively.
    # assert contains_man1_pages(tmp_path_rebuilt_sdist)  # FIXME: See #80255
    rebuilt_sdist_path = unpack_sdist(
        tmp_path_rebuilt_sdist,
        tmp_dir_rebuilt_sdist / 'src',
    )
    assert rebuilt_sdist_path.exists()
    assert rebuilt_sdist_path.is_dir()
    # Ensure the man page directory exists to ease diff comparison.
    for dir_path in (rebuilt_sdist_path, sdist_with_manpages_path):
        (dir_path / 'docs/man/man1').mkdir(parents=True, exist_ok=True)
    assert_dirs_equal(rebuilt_sdist_path, sdist_with_manpages_path)

    # Test building a wheel from the rebuilt sdist with manpages contents
    # and lowest supported build deps
    tmp_dir_rebuilt_wheel = tmp_path / 'rebuilt-wheel'
    build_dists(
        venv_python_exe, '--wheel',
        f'--outdir={tmp_dir_rebuilt_wheel!s}',
        str(sdist_with_manpages_path),
        env_vars={
            'PIP_CONSTRAINT': str(LOWEST_SUPPORTED_BUILD_DEPS_FILE),
        },
    )
    tmp_path_rebuilt_wheel = tmp_dir_rebuilt_wheel / EXPECTED_WHEEL_NAME
    # Checking that the expected wheel got created...
    assert tmp_path_rebuilt_wheel.exists()
|
||||
|
||||
|
||||
def test_pep660_editable_install_smoke(venv_python_exe: Path) -> None:
    """Smoke-test PEP 660 editable install.

    This verifies that the in-tree build backend wrapper
    does not break any required interfaces.
    """
    pip_install(venv_python_exe, '-e', str(SRC_ROOT_DIR))

    # Verify the editable install is visible to pip with the right metadata.
    pip_show_cmd = (
        str(venv_python_exe), '-m',
        'pip', 'show', DIST_FILENAME_BASE,
    )
    installed_ansible_meta = check_output(
        pip_show_cmd,
        env={}, stderr=PIPE, text=True,
    ).splitlines()
    assert f'Name: {DIST_FILENAME_BASE}' in installed_ansible_meta
    assert f'Version: {PKG_DIST_VERSION}' in installed_ansible_meta

    # Verify the package is importable and reports the expected version
    # at runtime, not just in pip's metadata.
    pip_runtime_version_cmd = (
        str(venv_python_exe), '-c',
        'from ansible import __version__; print(__version__)',
    )
    runtime_ansible_version = check_output(
        pip_runtime_version_cmd,
        env={}, stderr=PIPE, text=True,
    ).strip()
    assert runtime_ansible_version == PKG_DIST_VERSION
|
@ -0,0 +1,2 @@
|
||||
shippable/posix/group5
|
||||
context/controller
|
@ -0,0 +1,38 @@
|
||||
#!/usr/bin/env bash
# Integration test for the packaging/cli-doc/build.py documentation generator.

set -eux

source virtualenv.sh

mkdir -p "${JUNIT_OUTPUT_DIR}" # ensure paths relative to this path work

cli_doc="${JUNIT_OUTPUT_DIR}/../../../packaging/cli-doc"
build="${cli_doc}/build.py"
template="template.j2"

# Test `rst` command

pip install jinja2

rst_dir="${OUTPUT_DIR}/rst"

# Generate with the default template, then with a custom one; verify both.
python.py "${build}" rst --output-dir "${rst_dir}" && ./verify.py "${rst_dir}"
python.py "${build}" rst --output-dir "${rst_dir}" --template "${template}" && ./verify.py "${rst_dir}"

# Test `man` command (and the argcomplete code path)

pip install docutils argcomplete

man_dir="${OUTPUT_DIR}/man"

python.py "${build}" man --output-dir "${man_dir}" && ./verify.py "${man_dir}"
python.py "${build}" man --output-dir "${man_dir}" --template "${template}" && ./verify.py "${man_dir}"

# Test `json` command

python.py "${build}" json --output-file docs.json && ls -l docs.json

# Ensure complete coverage of the main conditional

# Importing build.py (instead of executing it) exercises the
# `if __name__ == '__main__'` false branch for coverage purposes.
echo "import sys; sys.path.insert(0, '${cli_doc}'); import build" > cover.py
python.py cover.py
|
@ -0,0 +1 @@
|
||||
{{ version }}
|
@ -0,0 +1,23 @@
|
||||
#!/usr/bin/env python
"""Verify generated CLI docs cover every documented ansible-core program."""

import os
import pathlib
import sys

# Programs that intentionally have no generated documentation.
exclude_programs = {
    'ansible-connection',
    'ansible-test',
}

bin_dir = pathlib.Path(os.environ['JUNIT_OUTPUT_DIR']).parent.parent.parent / 'bin'
programs = {program.name for program in bin_dir.iterdir() if program.name not in exclude_programs}

docs_dir = pathlib.Path(sys.argv[1])
docs = {path.with_suffix('').name for path in docs_dir.iterdir()}

print('\n'.join(sorted(docs)))

missing = programs - docs
extra = docs - programs

if missing or extra:
    raise RuntimeError(f'{missing=} {extra=}')
|
@ -0,0 +1,20 @@
|
||||
# IMPORTANT
|
||||
# Set "ignore_missing_imports" per package below, rather than globally.
|
||||
# That will help identify missing type stubs that should be added to the sanity test environment.
|
||||
|
||||
[mypy]
|
||||
|
||||
[mypy-docutils]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-docutils.core]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-docutils.writers]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-docutils.writers.manpage]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-argcomplete]
|
||||
ignore_missing_imports = True
|
Loading…
Reference in New Issue