diff --git a/.azure-pipelines/azure-pipelines.yml b/.azure-pipelines/azure-pipelines.yml index 7438d4219cf..c3619bc0349 100644 --- a/.azure-pipelines/azure-pipelines.yml +++ b/.azure-pipelines/azure-pipelines.yml @@ -68,9 +68,20 @@ stages: nameFormat: Server {0} testFormat: windows/{0}/1 targets: - - test: 2016 - - test: 2019 - - test: 2022 + - name: 2016 WinRM HTTP + test: 2016/winrm/http + - name: 2019 WinRM HTTPS + test: 2019/winrm/https + - name: 2022 WinRM HTTPS + test: 2022/winrm/https + - name: 2022 PSRP HTTP + test: 2022/psrp/http + - name: 2022 SSH Key + test: 2022/ssh/key + - name: 2025 PSRP HTTP + test: 2025/psrp/http + - name: 2025 SSH Key + test: 2025/ssh/key - stage: Remote dependsOn: [] jobs: @@ -79,14 +90,14 @@ stages: targets: - name: macOS 14.3 test: macos/14.3 - - name: RHEL 9.3 py39 - test: rhel/9.3@3.9 - - name: RHEL 9.3 py311 - test: rhel/9.3@3.11 - - name: FreeBSD 13.3 - test: freebsd/13.3 - - name: FreeBSD 14.0 - test: freebsd/14.0 + - name: RHEL 9.4 py39 + test: rhel/9.4@3.9 + - name: RHEL 9.4 py312 + test: rhel/9.4@3.12 + - name: FreeBSD 13.4 + test: freebsd/13.4 + - name: FreeBSD 14.1 + test: freebsd/14.1 groups: - 1 - 2 @@ -95,12 +106,12 @@ stages: targets: - name: macOS 14.3 test: macos/14.3 - - name: RHEL 9.3 - test: rhel/9.3 - - name: FreeBSD 13.3 - test: freebsd/13.3 - - name: FreeBSD 14.0 - test: freebsd/14.0 + - name: RHEL 9.4 + test: rhel/9.4 + - name: FreeBSD 13.4 + test: freebsd/13.4 + - name: FreeBSD 14.1 + test: freebsd/14.1 groups: - 3 - 4 @@ -108,44 +119,44 @@ stages: - template: templates/matrix.yml # context/controller (ansible-test container management) parameters: targets: - - name: Alpine 3.19 - test: alpine/3.19 - - name: Fedora 39 - test: fedora/39 - - name: RHEL 9.3 - test: rhel/9.3 - - name: Ubuntu 22.04 - test: ubuntu/22.04 + - name: Alpine 3.20 + test: alpine/3.20 + - name: Fedora 40 + test: fedora/40 + - name: RHEL 9.4 + test: rhel/9.4 + - name: Ubuntu 24.04 + test: ubuntu/24.04 groups: - 6 - stage: Docker 
dependsOn: [] jobs: - - template: templates/matrix.yml + - template: templates/matrix.yml # context/target parameters: testFormat: linux/{0} targets: - - name: Alpine 3.19 - test: alpine319 - - name: Fedora 39 - test: fedora39 - - name: Ubuntu 20.04 - test: ubuntu2004 + - name: Alpine 3.20 + test: alpine320 + - name: Fedora 40 + test: fedora40 - name: Ubuntu 22.04 test: ubuntu2204 + - name: Ubuntu 24.04 + test: ubuntu2404 groups: - 1 - 2 - - template: templates/matrix.yml + - template: templates/matrix.yml # context/controller parameters: testFormat: linux/{0} targets: - - name: Alpine 3.19 - test: alpine319 - - name: Fedora 39 - test: fedora39 - - name: Ubuntu 22.04 - test: ubuntu2204 + - name: Alpine 3.20 + test: alpine320 + - name: Fedora 40 + test: fedora40 + - name: Ubuntu 24.04 + test: ubuntu2404 groups: - 3 - 4 @@ -158,7 +169,6 @@ stages: nameFormat: Python {0} testFormat: galaxy/{0}/1 targets: - - test: '3.10' - test: 3.11 - test: 3.12 - test: 3.13 @@ -170,7 +180,6 @@ stages: nameFormat: Python {0} testFormat: generic/{0}/1 targets: - - test: '3.10' - test: 3.11 - test: 3.12 - test: 3.13 @@ -183,9 +192,20 @@ stages: nameFormat: Server {0} testFormat: i/windows/{0} targets: - - test: 2016 - - test: 2019 - - test: 2022 + - name: 2016 WinRM HTTP + test: 2016/winrm/http + - name: 2019 WinRM HTTPS + test: 2019/winrm/https + - name: 2022 WinRM HTTPS + test: 2022/winrm/https + - name: 2022 PSRP HTTP + test: 2022/psrp/http + - name: 2022 SSH Key + test: 2022/ssh/key + - name: 2025 PSRP HTTP + test: 2025/psrp/http + - name: 2025 SSH Key + test: 2025/ssh/key - stage: Incidental dependsOn: [] jobs: @@ -195,8 +215,6 @@ stages: targets: - name: IOS Python test: ios/csr1000v/ - - name: VyOS Python - test: vyos/1.1.8/ - stage: Summary condition: succeededOrFailed() dependsOn: diff --git a/.azure-pipelines/commands/incidental/vyos.sh b/.azure-pipelines/commands/incidental/vyos.sh deleted file mode 120000 index cad3e41b707..00000000000 --- 
a/.azure-pipelines/commands/incidental/vyos.sh +++ /dev/null @@ -1 +0,0 @@ -network.sh \ No newline at end of file diff --git a/.azure-pipelines/commands/incidental/windows.sh b/.azure-pipelines/commands/incidental/windows.sh index 24272f62baf..f5a3070c457 100755 --- a/.azure-pipelines/commands/incidental/windows.sh +++ b/.azure-pipelines/commands/incidental/windows.sh @@ -6,6 +6,8 @@ declare -a args IFS='/:' read -ra args <<< "$1" version="${args[1]}" +connection="${args[2]}" +connection_setting="${args[3]}" target="shippable/windows/incidental/" @@ -26,11 +28,7 @@ if [ -s /tmp/windows.txt ] || [ "${CHANGED:+$CHANGED}" == "" ]; then echo "Detected changes requiring integration tests specific to Windows:" cat /tmp/windows.txt - echo "Running Windows integration tests for multiple versions concurrently." - - platforms=( - --windows "${version}" - ) + echo "Running Windows integration tests for the version ${version}." else echo "No changes requiring integration tests specific to Windows were detected." 
echo "Running Windows integration tests for a single version only: ${single_version}" @@ -39,14 +37,10 @@ else echo "Skipping this job since it is for: ${version}" exit 0 fi - - platforms=( - --windows "${version}" - ) fi # shellcheck disable=SC2086 ansible-test windows-integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \ - "${platforms[@]}" \ - --docker default --python "${python_default}" \ - --remote-terminate always --remote-stage "${stage}" --remote-provider "${provider}" + --controller "docker:default,python=${python_default}" \ + --target "remote:windows/${version},connection=${connection}+${connection_setting},provider=${provider}" \ + --remote-terminate always --remote-stage "${stage}" diff --git a/.azure-pipelines/commands/windows.sh b/.azure-pipelines/commands/windows.sh index 693d4f24bdc..622eb9e2d5e 100755 --- a/.azure-pipelines/commands/windows.sh +++ b/.azure-pipelines/commands/windows.sh @@ -6,7 +6,9 @@ declare -a args IFS='/:' read -ra args <<< "$1" version="${args[1]}" -group="${args[2]}" +connection="${args[2]}" +connection_setting="${args[3]}" +group="${args[4]}" target="shippable/windows/group${group}/" @@ -31,11 +33,7 @@ if [ -s /tmp/windows.txt ] || [ "${CHANGED:+$CHANGED}" == "" ]; then echo "Detected changes requiring integration tests specific to Windows:" cat /tmp/windows.txt - echo "Running Windows integration tests for multiple versions concurrently." - - platforms=( - --windows "${version}" - ) + echo "Running Windows integration tests for the version ${version}." else echo "No changes requiring integration tests specific to Windows were detected." 
echo "Running Windows integration tests for a single version only: ${single_version}" @@ -44,17 +42,13 @@ else echo "Skipping this job since it is for: ${version}" exit 0 fi - - platforms=( - --windows "${version}" - ) fi -for version in "${python_versions[@]}"; do +for py_version in "${python_versions[@]}"; do changed_all_target="all" changed_all_mode="default" - if [ "${version}" == "${python_default}" ]; then + if [ "${py_version}" == "${python_default}" ]; then # smoketest tests if [ "${CHANGED}" ]; then # with change detection enabled run tests for anything changed @@ -80,7 +74,7 @@ for version in "${python_versions[@]}"; do fi # terminate remote instances on the final python version tested - if [ "${version}" = "${python_versions[-1]}" ]; then + if [ "${py_version}" = "${python_versions[-1]}" ]; then terminate="always" else terminate="never" @@ -88,7 +82,8 @@ for version in "${python_versions[@]}"; do # shellcheck disable=SC2086 ansible-test windows-integration --color -v --retry-on-error "${ci}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \ - "${platforms[@]}" --changed-all-target "${changed_all_target}" --changed-all-mode "${changed_all_mode}" \ - --docker default --python "${version}" \ - --remote-terminate "${terminate}" --remote-stage "${stage}" --remote-provider "${provider}" + --changed-all-target "${changed_all_target}" --changed-all-mode "${changed_all_mode}" \ + --controller "docker:default,python=${py_version}" \ + --target "remote:windows/${version},connection=${connection}+${connection_setting},provider=${provider}" \ + --remote-terminate "${terminate}" --remote-stage "${stage}" done diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 24f4438fbb3..72ee0f901d7 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -2,3 +2,5 @@ # Bulk PowerShell sanity fixes 6def4a3180fe03981ba64c6d8db28fed3bb39c0c 716631189cb5a3f66b3add98f39e64e98bc17bf7 +# Bulk update of strings from triple single quotes to 
triple double quotes +a0495fc31497798a7a833ba7406a9729e1528dd8 diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 300cff38324..fc15ea5dfc2 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -4,11 +4,14 @@ Hi! Nice to see you here! ## QUESTIONS ? -Please see the [Community Guide](https://docs.ansible.com/ansible/latest/community/index.html) for information on -how to ask questions on the [mailing lists](https://docs.ansible.com/ansible/latest/community/communication.html#mailing-list-information) and IRC. +If you have questions about anything related to Ansible, get in touch with us! +See [Communicating with the Ansible community](https://docs.ansible.com/ansible/devel/community/communication.html) to find out how. -The GitHub issue tracker is not the best place for questions for various reasons, -but both IRC and the mailing list are very helpful places for those things, as the community page explains best. +The [Community Guide](https://docs.ansible.com/ansible/devel/community/index.html) also explains how to contribute +and interact with the project, including how to submit bug reports and code to Ansible. + +Please note that the GitHub issue tracker is not the best place to ask questions for several reasons. +You'll get more helpful, and quicker, responses in the forum. ## CONTRIBUTING ? @@ -17,11 +20,11 @@ By contributing to this project you agree to the [Developer Certificate of Origi The Ansible project is licensed under the [GPL-3.0](COPYING) or later. Some portions of the code fall under other licenses as noted in individual files. The Ansible project accepts contributions through GitHub pull requests. -Please review the [Community Guide](https://docs.ansible.com/ansible/latest/community/index.html) for more information on contributing to Ansible. +Please review the [Community Guide](https://docs.ansible.com/ansible/devel/community/index.html) for more information on contributing to Ansible. ## BUG TO REPORT ? 
-First and foremost, also check the [Community Guide](https://docs.ansible.com/ansible/latest/community/index.html). +First and foremost, also check the [Community Guide](https://docs.ansible.com/ansible/devel/community/index.html). You can report bugs or make enhancement requests at the [Ansible GitHub issue page](http://github.com/ansible/ansible/issues/new/choose) by filling out the issue template that will be presented. diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 3159784d158..8f4944c43c0 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -19,13 +19,14 @@ body: Also test if the latest release and devel branch are affected too. - **Tip:** If you are seeking community support, please consider - [starting a mailing list thread or chatting in IRC][ML||IRC]. + **Tip:** If you are seeking community support, please see + [Communicating with the Ansible community][communication] to + get in touch and ask questions. - [ML||IRC]: - https://docs.ansible.com/ansible-core/devel/community/communication.html?utm_medium=github&utm_source=issue_form--bug_report.yml#mailing-list-information + [communication]: + https://docs.ansible.com/ansible/devel/community/communication.html [issue search]: ../search?q=is%3Aissue&type=issues @@ -54,7 +55,7 @@ body: Why? - We would do it by ourselves but unfortunatelly, the curent + We would do it by ourselves but unfortunately, the current edition of GitHub Issue Forms Alpha does not support this yet 🤷 @@ -258,7 +259,7 @@ body: description: | Read the [Ansible Code of Conduct][CoC] first. 
- [CoC]: https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--bug_report.yml + [CoC]: https://docs.ansible.com/ansible/devel/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--bug_report.yml options: - label: I agree to follow the Ansible Code of Conduct required: true diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 74ec5696fdf..6aa4a2b7647 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -2,7 +2,7 @@ blank_issues_enabled: false # default: true contact_links: - name: 🔐 Security bug report 🔥 - url: https://docs.ansible.com/ansible/latest/community/reporting_bugs_and_features.html?utm_medium=github&utm_source=issue_template_chooser + url: https://docs.ansible.com/ansible/devel/community/reporting_bugs_and_features.html?utm_medium=github&utm_source=issue_template_chooser about: | Please learn how to report security vulnerabilities here. @@ -11,12 +11,12 @@ contact_links: a prompt response. For more information, see - https://docs.ansible.com/ansible/latest/community/reporting_bugs_and_features.html + https://docs.ansible.com/ansible/devel/community/reporting_bugs_and_features.html - name: 📝 Ansible Code of Conduct - url: https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_template_chooser + url: https://docs.ansible.com/ansible/devel/community/code_of_conduct.html?utm_medium=github&utm_source=issue_template_chooser about: ❤ Be nice to other members of the community. ☮ Behave. 
-- name: 💬 Talks to the community - url: https://docs.ansible.com/ansible/latest/community/communication.html?utm_medium=github&utm_source=issue_template_chooser#mailing-list-information +- name: 💬 Talk to the community + url: https://docs.ansible.com/ansible/devel/community/communication.html?utm_medium=github&utm_source=issue_template_chooser#mailing-list-information about: Please ask and answer usage questions here - name: ⚡ Working groups url: https://github.com/ansible/community/wiki diff --git a/.github/ISSUE_TEMPLATE/documentation_report.yml b/.github/ISSUE_TEMPLATE/documentation_report.yml index ca62bb55a77..efe8d1c2035 100644 --- a/.github/ISSUE_TEMPLATE/documentation_report.yml +++ b/.github/ISSUE_TEMPLATE/documentation_report.yml @@ -22,12 +22,14 @@ body: Also test if the latest release and devel branch are affected too. - **Tip:** If you are seeking community support, please consider - [starting a mailing list thread or chatting in IRC][ML||IRC]. + **Tip:** If you are seeking community support, please see + [Communicating with the Ansible community][communication] to + get in touch and ask questions. - [ML||IRC]: - https://docs.ansible.com/ansible-core/devel/community/communication.html?utm_medium=github&utm_source=issue_form--documentation_report.yml#mailing-list-information + + [communication]: + https://docs.ansible.com/ansible/devel/community/communication.html [issue search]: ../search?q=is%3Aissue&type=issues @@ -205,7 +207,7 @@ body: description: | Read the [Ansible Code of Conduct][CoC] first. 
- [CoC]: https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--documentation_report.yml + [CoC]: https://docs.ansible.com/ansible/devel/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--documentation_report.yml options: - label: I agree to follow the Ansible Code of Conduct required: true diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml index dd39c40de1c..2fce680fe64 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.yml +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -21,8 +21,7 @@ body: If unsure, consider filing a [new proposal] instead outlining your use-cases, the research and implementation considerations. Then, - start a discussion on one of the public [IRC meetings] we have just - for this. + start a discussion in the [Ansible forum][forum].
@@ -44,21 +43,22 @@ body: Also test if the devel branch does not already implement this. - **Tip:** If you are seeking community support, please consider - [starting a mailing list thread or chatting in IRC][ML||IRC]. + **Tip:** If you are seeking community support, please see + [Communicating with the Ansible community][communication] to + get in touch and ask questions. [contribute to collections]: https://docs.ansible.com/ansible-core/devel/community/contributing_maintained_collections.html?utm_medium=github&utm_source=issue_form--feature_request.yml - [IRC meetings]: - https://docs.ansible.com/ansible-core/devel/community/communication.html?utm_medium=github&utm_source=issue_form--feature_request.yml#irc-meetings + [communication]: + https://docs.ansible.com/ansible/devel/community/communication.html [issue search]: ../search?q=is%3Aissue&type=issues - [ML||IRC]: - https://docs.ansible.com/ansible-core/devel/community/communication.html?utm_medium=github&utm_source=issue_form--feature_request.yml#mailing-list-information + [forum help]: + https://forum.ansible.com/c/help/6 [new proposal]: ../../proposals/issues/new @@ -109,7 +109,7 @@ body: Why? - We would do it by ourselves but unfortunatelly, the curent + We would do it by ourselves but unfortunately, the current edition of GitHub Issue Forms Alpha does not support this yet 🤷 @@ -185,7 +185,7 @@ body: description: | Read the [Ansible Code of Conduct][CoC] first. 
- [CoC]: https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--feature_request.yml + [CoC]: https://docs.ansible.com/ansible/devel/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--feature_request.yml options: - label: I agree to follow the Ansible Code of Conduct required: true diff --git a/.github/RELEASE_NAMES.txt b/.github/RELEASE_NAMES.txt index 4ff8c453578..17d96a6897e 100644 --- a/.github/RELEASE_NAMES.txt +++ b/.github/RELEASE_NAMES.txt @@ -1,3 +1,4 @@ +2.19.0 What Is and What Should Never Be 2.18.0 Fool in the Rain 2.17.0 Gallows Pole 2.16.0 All My Love diff --git a/.gitignore b/.gitignore index 8b244f60ee7..57019fd1ab6 100644 --- a/.gitignore +++ b/.gitignore @@ -92,6 +92,8 @@ Vagrantfile /lib/ansible_base.egg-info/ # First used in the `devel` branch during Ansible 2.11 development. /lib/ansible_core.egg-info/ +# First used in the `devel` branch during Ansible 2.18 development. +/ansible_core.egg-info/ # vendored lib dir lib/ansible/_vendor/* !lib/ansible/_vendor/__init__.py diff --git a/MANIFEST.in b/MANIFEST.in index bf7a6a047e2..fa609f52e9a 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,11 +1,10 @@ include COPYING -include bin/* include changelogs/CHANGELOG*.rst include changelogs/changelog.yaml include licenses/*.txt include requirements.txt recursive-include packaging *.py *.j2 recursive-include test/integration * -recursive-include test/sanity *.in *.json *.py *.txt +recursive-include test/sanity *.in *.json *.py *.txt *.ini recursive-include test/support *.py *.ps1 *.psm1 *.cs *.md recursive-include test/units * diff --git a/README.md b/README.md index 4db066f0901..9685e77748d 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,9 @@ [![PyPI version](https://img.shields.io/pypi/v/ansible-core.svg)](https://pypi.org/project/ansible-core) [![Docs badge](https://img.shields.io/badge/docs-latest-brightgreen.svg)](https://docs.ansible.com/ansible/latest/) -[![Chat 
badge](https://img.shields.io/badge/chat-IRC-brightgreen.svg)](https://docs.ansible.com/ansible/latest/community/communication.html) +[![Chat badge](https://img.shields.io/badge/chat-IRC-brightgreen.svg)](https://docs.ansible.com/ansible/devel/community/communication.html) [![Build Status](https://dev.azure.com/ansible/ansible/_apis/build/status/CI?branchName=devel)](https://dev.azure.com/ansible/ansible/_build/latest?definitionId=20&branchName=devel) -[![Ansible Code of Conduct](https://img.shields.io/badge/code%20of%20conduct-Ansible-silver.svg)](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) -[![Ansible mailing lists](https://img.shields.io/badge/mailing%20lists-Ansible-orange.svg)](https://docs.ansible.com/ansible/latest/community/communication.html#mailing-list-information) +[![Ansible Code of Conduct](https://img.shields.io/badge/code%20of%20conduct-Ansible-silver.svg)](https://docs.ansible.com/ansible/devel/community/code_of_conduct.html) +[![Ansible mailing lists](https://img.shields.io/badge/mailing%20lists-Ansible-orange.svg)](https://docs.ansible.com/ansible/devel/community/communication.html#mailing-list-information) [![Repository License](https://img.shields.io/badge/license-GPL%20v3.0-brightgreen.svg)](COPYING) [![Ansible CII Best Practices certification](https://bestpractices.coreinfrastructure.org/projects/2372/badge)](https://bestpractices.coreinfrastructure.org/projects/2372) @@ -40,21 +40,33 @@ features and fixes, directly. Although it is reasonably stable, you are more lik breaking changes when running the `devel` branch. We recommend getting involved in the Ansible community if you want to run the `devel` branch. -## Get Involved +## Communication -* Read [Community Information](https://docs.ansible.com/ansible/latest/community) for all +Join the Ansible forum to ask questions, get help, and interact with the +community. 
+ +* [Get Help](https://forum.ansible.com/c/help/6): Find help or share your Ansible knowledge to help others. + Use tags to filter and subscribe to posts, such as the following: + * Posts tagged with [ansible](https://forum.ansible.com/tag/ansible) + * Posts tagged with [ansible-core](https://forum.ansible.com/tag/ansible-core) + * Posts tagged with [playbook](https://forum.ansible.com/tag/playbook) +* [Social Spaces](https://forum.ansible.com/c/chat/4): Meet and interact with fellow enthusiasts. +* [News & Announcements](https://forum.ansible.com/c/news/5): Track project-wide announcements including social events. +* [Bullhorn newsletter](https://docs.ansible.com/ansible/devel/community/communication.html#the-bullhorn): Get release announcements and important changes. + +For more ways to get in touch, see [Communicating with the Ansible community](https://docs.ansible.com/ansible/devel/community/communication.html). + +## Contribute to Ansible + +* Check out the [Contributor's Guide](./.github/CONTRIBUTING.md). +* Read [Community Information](https://docs.ansible.com/ansible/devel/community) for all kinds of ways to contribute to and interact with the project, - including mailing list information and how to submit bug reports and - code to Ansible. -* Join a [Working Group](https://docs.ansible.com/ansible/devel/community/communication.html#working-groups), - an organized community devoted to a specific technology domain or platform. + including how to submit bug reports and code to Ansible. * Submit a proposed code update through a pull request to the `devel` branch. * Talk to us before making larger changes to avoid duplicate efforts. This not only helps everyone know what is going on, but it also helps save time and effort if we decide some changes are needed. 
-* For a list of email lists, IRC channels and Working Groups, see the - [Communication page](https://docs.ansible.com/ansible/devel/community/communication.html) ## Coding Guidelines @@ -67,7 +79,7 @@ We document our Coding Guidelines in the [Developer Guide](https://docs.ansible. * The `devel` branch corresponds to the release actively under development. * The `stable-2.X` branches correspond to stable releases. -* Create a branch based on `devel` and set up a [dev environment](https://docs.ansible.com/ansible/latest/dev_guide/developing_modules_general.html#common-environment-setup) if you want to open a PR. +* Create a branch based on `devel` and set up a [dev environment](https://docs.ansible.com/ansible/devel/dev_guide/developing_modules_general.html#common-environment-setup) if you want to open a PR. * See the [Ansible release and maintenance](https://docs.ansible.com/ansible/devel/reference_appendices/release_and_maintenance.html) page for information about active branches. ## Roadmap diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 2f4d649c4d9..231ace8c768 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -1,2 +1,2 @@ -ancestor: 2.17.0 +ancestor: 2.18.0 releases: {} diff --git a/changelogs/fragments/72321_git.yml b/changelogs/fragments/72321_git.yml deleted file mode 100644 index 8ba0ca7558c..00000000000 --- a/changelogs/fragments/72321_git.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: -- git - check if git version is available or not before using it for comparison (https://github.com/ansible/ansible/issues/72321). 
diff --git a/changelogs/fragments/82307-handlers-lockstep-linear-fix.yml b/changelogs/fragments/82307-handlers-lockstep-linear-fix.yml deleted file mode 100644 index da97a9753bb..00000000000 --- a/changelogs/fragments/82307-handlers-lockstep-linear-fix.yml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - Fix handlers not being executed in lockstep using the linear strategy in some cases (https://github.com/ansible/ansible/issues/82307) diff --git a/changelogs/fragments/82535-properly-quote-shell.yml b/changelogs/fragments/82535-properly-quote-shell.yml deleted file mode 100644 index be93f30c59d..00000000000 --- a/changelogs/fragments/82535-properly-quote-shell.yml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: -- shell plugin - properly quote all needed components of shell commands (https://github.com/ansible/ansible/issues/82535) diff --git a/changelogs/fragments/82671-ansible-doc-role-examples.yml b/changelogs/fragments/82671-ansible-doc-role-examples.yml deleted file mode 100644 index 7f041babc5b..00000000000 --- a/changelogs/fragments/82671-ansible-doc-role-examples.yml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - "ansible-doc - show examples in role entrypoint argument specs (https://github.com/ansible/ansible/pull/82671)." 
diff --git a/changelogs/fragments/82831_countme_yum_repository.yml b/changelogs/fragments/82831_countme_yum_repository.yml deleted file mode 100644 index 7f6bec4c487..00000000000 --- a/changelogs/fragments/82831_countme_yum_repository.yml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - support the countme option when using yum_repository diff --git a/changelogs/fragments/82878-fetch-dest-is-dir.yml b/changelogs/fragments/82878-fetch-dest-is-dir.yml deleted file mode 100644 index bf9c3bf088a..00000000000 --- a/changelogs/fragments/82878-fetch-dest-is-dir.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: -- >- - fetch - add error message when using ``dest`` with a trailing slash that becomes a local directory - - https://github.com/ansible/ansible/issues/82878 diff --git a/changelogs/fragments/82941.yml b/changelogs/fragments/82941.yml deleted file mode 100644 index d15d7d3ebd1..00000000000 --- a/changelogs/fragments/82941.yml +++ /dev/null @@ -1,5 +0,0 @@ ---- -removed_features: - - paramiko_ssh - removed deprecated ssh_extra_args from the paramiko_ssh connection plugin (https://github.com/ansible/ansible/issues/82941). - - paramiko_ssh - removed deprecated ssh_common_args from the paramiko_ssh connection plugin (https://github.com/ansible/ansible/issues/82940). - - paramiko_ssh - removed deprecated ssh_args from the paramiko_ssh connection plugin (https://github.com/ansible/ansible/issues/82939). diff --git a/changelogs/fragments/82946.yml b/changelogs/fragments/82946.yml deleted file mode 100644 index 23f1886a5c9..00000000000 --- a/changelogs/fragments/82946.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -removed_features: - - play_context - remove deprecated PlayContext.verbosity property (https://github.com/ansible/ansible/issues/82945). 
diff --git a/changelogs/fragments/82947.yml b/changelogs/fragments/82947.yml deleted file mode 100644 index 8ff16288a84..00000000000 --- a/changelogs/fragments/82947.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -removed_features: - - loader - remove deprecated non-inclusive words (https://github.com/ansible/ansible/issues/82947). diff --git a/changelogs/fragments/83031.yml b/changelogs/fragments/83031.yml deleted file mode 100644 index 32d7c09529d..00000000000 --- a/changelogs/fragments/83031.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: - - lookup - Fixed examples of csv lookup plugin (https://github.com/ansible/ansible/issues/83031). diff --git a/changelogs/fragments/83155-ansible-doc-paragraphs.yml b/changelogs/fragments/83155-ansible-doc-paragraphs.yml deleted file mode 100644 index b92bd526b77..00000000000 --- a/changelogs/fragments/83155-ansible-doc-paragraphs.yml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - "ansible-doc - format top-level descriptions with multiple paragraphs as multiple paragraphs, instead of concatenating them (https://github.com/ansible/ansible/pull/83155)." diff --git a/changelogs/fragments/83327.yml b/changelogs/fragments/83327.yml deleted file mode 100644 index 8cdd448aa5d..00000000000 --- a/changelogs/fragments/83327.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: - - fixed unit test test_borken_cowsay to address mock not been properly applied when existing unix system already have cowsay installed. \ No newline at end of file diff --git a/changelogs/fragments/83331.yml b/changelogs/fragments/83331.yml deleted file mode 100644 index 9de98f282ba..00000000000 --- a/changelogs/fragments/83331.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: - - facts - add facts about x86_64 flags to detect microarchitecture (https://github.com/ansible/ansible/issues/83331). 
diff --git a/changelogs/fragments/83392-fix-memory-issues-handlers.yml b/changelogs/fragments/83392-fix-memory-issues-handlers.yml deleted file mode 100644 index 7e4c2e5599f..00000000000 --- a/changelogs/fragments/83392-fix-memory-issues-handlers.yml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - Fix rapid memory usage growth when notifying handlers using the ``listen`` keyword (https://github.com/ansible/ansible/issues/83392) diff --git a/changelogs/fragments/83642-fix-sanity-ignore-for-uri.yml b/changelogs/fragments/83642-fix-sanity-ignore-for-uri.yml new file mode 100644 index 00000000000..14ff7a0723e --- /dev/null +++ b/changelogs/fragments/83642-fix-sanity-ignore-for-uri.yml @@ -0,0 +1,2 @@ +bugfixes: + - uri - mark ``url`` as required (https://github.com/ansible/ansible/pull/83642). diff --git a/changelogs/fragments/83643-fix-sanity-ignore-for-copy.yml b/changelogs/fragments/83643-fix-sanity-ignore-for-copy.yml new file mode 100644 index 00000000000..07d6312cb4d --- /dev/null +++ b/changelogs/fragments/83643-fix-sanity-ignore-for-copy.yml @@ -0,0 +1,3 @@ +minor_changes: + - copy - parameter ``local_follow`` was incorrectly documented as having default value ``True`` (https://github.com/ansible/ansible/pull/83643). + - copy - fix sanity test failures (https://github.com/ansible/ansible/pull/83643). 
diff --git a/changelogs/fragments/83690-get_url-content-disposition-filename.yml b/changelogs/fragments/83690-get_url-content-disposition-filename.yml new file mode 100644 index 00000000000..47f9734c35e --- /dev/null +++ b/changelogs/fragments/83690-get_url-content-disposition-filename.yml @@ -0,0 +1,2 @@ +bugfixes: + - get_url - fix honoring ``filename`` from the ``content-disposition`` header even when the type is ``inline`` (https://github.com/ansible/ansible/issues/83690) diff --git a/changelogs/fragments/83700-enable-file-disable-diff.yml b/changelogs/fragments/83700-enable-file-disable-diff.yml new file mode 100644 index 00000000000..4fdc9feb4c7 --- /dev/null +++ b/changelogs/fragments/83700-enable-file-disable-diff.yml @@ -0,0 +1,2 @@ +minor_changes: + - file - enable file module to disable diff_mode (https://github.com/ansible/ansible/issues/80817). diff --git a/changelogs/fragments/83965-action-groups-schema.yml b/changelogs/fragments/83965-action-groups-schema.yml new file mode 100644 index 00000000000..cd4a439044d --- /dev/null +++ b/changelogs/fragments/83965-action-groups-schema.yml @@ -0,0 +1,2 @@ +minor_changes: + - "runtime-metadata sanity test - improve validation of ``action_groups`` (https://github.com/ansible/ansible/pull/83965)." diff --git a/changelogs/fragments/84008-additional-logging.yml b/changelogs/fragments/84008-additional-logging.yml new file mode 100644 index 00000000000..80bd3a7ddd9 --- /dev/null +++ b/changelogs/fragments/84008-additional-logging.yml @@ -0,0 +1,3 @@ +minor_changes: + - Added a -vvvvv log message indicating when a host fails to produce output within the timeout period. + - SSH Escalation-related -vvv log messages now include the associated host information. 
diff --git a/changelogs/fragments/84019-ignore_unreachable-loop.yml b/changelogs/fragments/84019-ignore_unreachable-loop.yml new file mode 100644 index 00000000000..da85af7e4b5 --- /dev/null +++ b/changelogs/fragments/84019-ignore_unreachable-loop.yml @@ -0,0 +1,2 @@ +bugfixes: + - Fix returning 'unreachable' for the overall task result. This prevents false positives when a looped task has unignored unreachable items (https://github.com/ansible/ansible/issues/84019). diff --git a/changelogs/fragments/84149-add-flush-cache-for-adhoc-commands.yml b/changelogs/fragments/84149-add-flush-cache-for-adhoc-commands.yml new file mode 100644 index 00000000000..854d2628b64 --- /dev/null +++ b/changelogs/fragments/84149-add-flush-cache-for-adhoc-commands.yml @@ -0,0 +1,3 @@ +minor_changes: +- > + ansible, ansible-console, ansible-pull - add --flush-cache option (https://github.com/ansible/ansible/issues/83749). diff --git a/changelogs/fragments/84229-windows-server-2025.yml b/changelogs/fragments/84229-windows-server-2025.yml new file mode 100644 index 00000000000..82c16371a34 --- /dev/null +++ b/changelogs/fragments/84229-windows-server-2025.yml @@ -0,0 +1,4 @@ +minor_changes: + - >- + Windows - Add support for Windows Server 2025 to Ansible and as an ``ansible-test`` + remote target - https://github.com/ansible/ansible/issues/84229 diff --git a/changelogs/fragments/PowerShell-AddType-temp.yml b/changelogs/fragments/PowerShell-AddType-temp.yml deleted file mode 100644 index 6019f8058ed..00000000000 --- a/changelogs/fragments/PowerShell-AddType-temp.yml +++ /dev/null @@ -1,7 +0,0 @@ -bugfixes: -- >- - powershell - Implement more robust deletion mechanism for C# code compilation - temporary files. This should avoid scenarios where the underlying temporary - directory may be temporarily locked by antivirus tools or other IO problems. - A failure to delete one of these temporary directories will result in a - warning rather than an outright failure. 
diff --git a/changelogs/fragments/action-plugin-docs-sidecar.yml b/changelogs/fragments/action-plugin-docs-sidecar.yml deleted file mode 100644 index c9faa07140a..00000000000 --- a/changelogs/fragments/action-plugin-docs-sidecar.yml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: -- ansible-test action-plugin-docs - Fix to check for sidecar documentation for action plugins diff --git a/changelogs/fragments/add_systemd_facts.yml b/changelogs/fragments/add_systemd_facts.yml deleted file mode 100644 index 93af448a7f3..00000000000 --- a/changelogs/fragments/add_systemd_facts.yml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - facts - add systemd version and features diff --git a/changelogs/fragments/ansible-config-validate.yml b/changelogs/fragments/ansible-config-validate.yml deleted file mode 100644 index fab48db9026..00000000000 --- a/changelogs/fragments/ansible-config-validate.yml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - ansible-config has new 'validate' option to find mispelled/forgein configurations in ini file or environment variables. diff --git a/changelogs/fragments/ansible-doc-color.yml b/changelogs/fragments/ansible-doc-color.yml deleted file mode 100644 index 045f212a58e..00000000000 --- a/changelogs/fragments/ansible-doc-color.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: - - ansible-doc - make colors configurable. diff --git a/changelogs/fragments/ansible-doc-inicate.yml b/changelogs/fragments/ansible-doc-inicate.yml deleted file mode 100644 index 519730869fd..00000000000 --- a/changelogs/fragments/ansible-doc-inicate.yml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - ansible-doc - fixed "inicates" typo in output diff --git a/changelogs/fragments/ansible-drop-python-3.7.yml b/changelogs/fragments/ansible-drop-python-3.7.yml deleted file mode 100644 index 73313a383df..00000000000 --- a/changelogs/fragments/ansible-drop-python-3.7.yml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - The minimum supported Python version on targets is now Python 3.8. 
diff --git a/changelogs/fragments/ansible-galaxy-role-install-symlink.yml b/changelogs/fragments/ansible-galaxy-role-install-symlink.yml deleted file mode 100644 index c2003b15cd2..00000000000 --- a/changelogs/fragments/ansible-galaxy-role-install-symlink.yml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - ansible-galaxy role install - fix symlinks (https://github.com/ansible/ansible/issues/82702, https://github.com/ansible/ansible/issues/81965). diff --git a/changelogs/fragments/ansible-test-container-update.yml b/changelogs/fragments/ansible-test-container-update.yml deleted file mode 100644 index de9d5afbf12..00000000000 --- a/changelogs/fragments/ansible-test-container-update.yml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - ansible-test - Update ``base`` and ``default`` containers to omit Python 3.7. diff --git a/changelogs/fragments/ansible-test-coverage-update.yml b/changelogs/fragments/ansible-test-coverage-update.yml deleted file mode 100644 index 93fe8e42e00..00000000000 --- a/changelogs/fragments/ansible-test-coverage-update.yml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - ansible-test - Update ``coverage`` to version 7.5.3. diff --git a/changelogs/fragments/ansible-test-fix-command-traceback.yml b/changelogs/fragments/ansible-test-fix-command-traceback.yml new file mode 100644 index 00000000000..d43294006f9 --- /dev/null +++ b/changelogs/fragments/ansible-test-fix-command-traceback.yml @@ -0,0 +1,2 @@ +bugfixes: + - ansible-test - Fix traceback that occurs after an interactive command fails. diff --git a/changelogs/fragments/ansible-test-http-test-container-update.yml b/changelogs/fragments/ansible-test-http-test-container-update.yml deleted file mode 100644 index 1b76ac52852..00000000000 --- a/changelogs/fragments/ansible-test-http-test-container-update.yml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - ansible-test - Update ``http-test-container`` to version 3.0.0. 
diff --git a/changelogs/fragments/ansible-test-nios-container.yml b/changelogs/fragments/ansible-test-nios-container.yml new file mode 100644 index 00000000000..f4b2a99acdd --- /dev/null +++ b/changelogs/fragments/ansible-test-nios-container.yml @@ -0,0 +1,2 @@ +minor_changes: + - ansible-test - Update ``nios-test-container`` to version 6.0.0. diff --git a/changelogs/fragments/ansible-test-probe-error-handling.yml b/changelogs/fragments/ansible-test-probe-error-handling.yml new file mode 100644 index 00000000000..bf4301cc48b --- /dev/null +++ b/changelogs/fragments/ansible-test-probe-error-handling.yml @@ -0,0 +1,3 @@ +minor_changes: + - ansible-test - Improve container runtime probe error handling. + When unexpected probe output is encountered, an error with more useful debugging information is provided. diff --git a/changelogs/fragments/ansible-test-pylint-fix.yml b/changelogs/fragments/ansible-test-pylint-fix.yml new file mode 100644 index 00000000000..877a5944967 --- /dev/null +++ b/changelogs/fragments/ansible-test-pylint-fix.yml @@ -0,0 +1,4 @@ +bugfixes: + - ansible-test - Enable the ``sys.unraisablehook`` work-around for the ``pylint`` sanity test on Python 3.11. + Previously the work-around was only enabled for Python 3.12 and later. + However, the same issue has been discovered on Python 3.11. diff --git a/changelogs/fragments/ansible-test-pypi-test-container-update.yml b/changelogs/fragments/ansible-test-pypi-test-container-update.yml deleted file mode 100644 index 67be470e43c..00000000000 --- a/changelogs/fragments/ansible-test-pypi-test-container-update.yml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - ansible-test - Update ``pypi-test-container`` to version 3.0.0. 
diff --git a/changelogs/fragments/ansible-test-remotes.yml b/changelogs/fragments/ansible-test-remotes.yml new file mode 100644 index 00000000000..cf3c832c8e8 --- /dev/null +++ b/changelogs/fragments/ansible-test-remotes.yml @@ -0,0 +1,2 @@ +minor_changes: + - ansible-test - Replace remote FreeBSD 13.3 with 13.4. diff --git a/changelogs/fragments/ansible-test-update.yml b/changelogs/fragments/ansible-test-update.yml new file mode 100644 index 00000000000..8431887dedb --- /dev/null +++ b/changelogs/fragments/ansible-test-update.yml @@ -0,0 +1,5 @@ +minor_changes: + - ansible-test - Update ``pylint`` sanity test to use version 3.3.1. + - ansible-test - Default to Python 3.13 in the ``base`` and ``default`` containers. + - ansible-test - Disable the ``deprecated-`` prefixed ``pylint`` rules as their results vary by Python version. + - ansible-test - Update the ``base`` and ``default`` containers. diff --git a/changelogs/fragments/ansible-test-utility-container-update.yml b/changelogs/fragments/ansible-test-utility-container-update.yml deleted file mode 100644 index 5d9ca669d95..00000000000 --- a/changelogs/fragments/ansible-test-utility-container-update.yml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - ansible-test - Update ``ansible-test-utility-container`` to version 3.0.0. diff --git a/changelogs/fragments/ansible_connection_path.yml b/changelogs/fragments/ansible_connection_path.yml deleted file mode 100644 index d1eb1866fbe..00000000000 --- a/changelogs/fragments/ansible_connection_path.yml +++ /dev/null @@ -1,8 +0,0 @@ -bugfixes: - - persistent connection plugins - The correct Ansible persistent connection helper is now always used. - Previously, the wrong script could be used, depending on the value of the ``PATH`` environment variable. - As a result, users were sometimes required to set ``ANSIBLE_CONNECTION_PATH`` to use the correct script. 
-deprecated_features: - - persistent connection plugins - The ``ANSIBLE_CONNECTION_PATH`` config option no longer has any effect, and will be removed in a future release. -breaking_changes: - - persistent connection plugins - The ``ANSIBLE_CONNECTION_PATH`` config option no longer has any effect. diff --git a/changelogs/fragments/ansible_managed_restore.yml b/changelogs/fragments/ansible_managed_restore.yml deleted file mode 100644 index 63d15bf9dca..00000000000 --- a/changelogs/fragments/ansible_managed_restore.yml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - ansible_managed restored it's 'templatability' by ensuring the possible injection routes are cut off earlier in the process. diff --git a/changelogs/fragments/apk_package_facts.yml b/changelogs/fragments/apk_package_facts.yml deleted file mode 100644 index 6eb1a351625..00000000000 --- a/changelogs/fragments/apk_package_facts.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: - - package_facts - apk fix when cache is empty (https://github.com/ansible/ansible/issues/83126). diff --git a/changelogs/fragments/apt_cache.yml b/changelogs/fragments/apt_cache.yml deleted file mode 100644 index d407431e034..00000000000 --- a/changelogs/fragments/apt_cache.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: - - apt_* - add more info messages raised while updating apt cache (https://github.com/ansible/ansible/issues/77941). diff --git a/changelogs/fragments/assemble.yml b/changelogs/fragments/assemble.yml deleted file mode 100644 index 27b66551492..00000000000 --- a/changelogs/fragments/assemble.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: - - assemble - update argument_spec with 'decrypt' option which is required by action plugin (https://github.com/ansible/ansible/issues/80840). 
diff --git a/changelogs/fragments/become-runas-system-deux.yml b/changelogs/fragments/become-runas-system-deux.yml new file mode 100644 index 00000000000..e8b17f92a4c --- /dev/null +++ b/changelogs/fragments/become-runas-system-deux.yml @@ -0,0 +1,3 @@ +bugfixes: + - >- + runas become - Fix up become logic to still get the SYSTEM token with the most privileges when running as SYSTEM. diff --git a/changelogs/fragments/buildroot.yml b/changelogs/fragments/buildroot.yml new file mode 100644 index 00000000000..18acd5438e0 --- /dev/null +++ b/changelogs/fragments/buildroot.yml @@ -0,0 +1,3 @@ +--- +bugfixes: + - user - Create Buildroot subclass as alias to Busybox (https://github.com/ansible/ansible/issues/83665). diff --git a/changelogs/fragments/cleanup-outdated-galaxy-install-info.yml b/changelogs/fragments/cleanup-outdated-galaxy-install-info.yml deleted file mode 100644 index 7fde75f13e7..00000000000 --- a/changelogs/fragments/cleanup-outdated-galaxy-install-info.yml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - ansible-galaxy collection install - remove old installation info when installing collections (https://github.com/ansible/ansible/issues/83182). diff --git a/changelogs/fragments/compat_removal.yml b/changelogs/fragments/compat_removal.yml new file mode 100644 index 00000000000..86da5d9933a --- /dev/null +++ b/changelogs/fragments/compat_removal.yml @@ -0,0 +1,3 @@ +--- +removed_features: + - removed deprecated pycompat24 and compat.importlib. diff --git a/changelogs/fragments/config.yml b/changelogs/fragments/config.yml new file mode 100644 index 00000000000..e7b7d6f808a --- /dev/null +++ b/changelogs/fragments/config.yml @@ -0,0 +1,3 @@ +--- +removed_features: + - Remove deprecated plural form of collection path (https://github.com/ansible/ansible/pull/84156). 
diff --git a/changelogs/fragments/config_init_fix.yml b/changelogs/fragments/config_init_fix.yml deleted file mode 100644 index f3b402dbdd4..00000000000 --- a/changelogs/fragments/config_init_fix.yml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - ansible-config will now properly template defaults before dumping them. diff --git a/changelogs/fragments/correct-callback-fqcn-old-style-action-invocation.yml b/changelogs/fragments/correct-callback-fqcn-old-style-action-invocation.yml deleted file mode 100644 index 675439604ea..00000000000 --- a/changelogs/fragments/correct-callback-fqcn-old-style-action-invocation.yml +++ /dev/null @@ -1,3 +0,0 @@ -bugfixes: - - Fix the task attribute ``resolved_action`` to show the FQCN instead of ``None`` when ``action`` or ``local_action`` is used in the playbook. - - Fix using ``module_defaults`` with ``local_action``/``action`` (https://github.com/ansible/ansible/issues/81905). diff --git a/changelogs/fragments/cron_err.yml b/changelogs/fragments/cron_err.yml new file mode 100644 index 00000000000..5e65a7b68ec --- /dev/null +++ b/changelogs/fragments/cron_err.yml @@ -0,0 +1,3 @@ +--- +minor_changes: + - cron - Provide additional error information while writing cron file (https://github.com/ansible/ansible/issues/83223). diff --git a/changelogs/fragments/cve-2024-8775.yml b/changelogs/fragments/cve-2024-8775.yml new file mode 100644 index 00000000000..a292c997044 --- /dev/null +++ b/changelogs/fragments/cve-2024-8775.yml @@ -0,0 +1,5 @@ +security_fixes: + - task result processing - Ensure that action-sourced result masking (``_ansible_no_log=True``) + is preserved. (CVE-2024-8775) + - include_vars action - Ensure that result masking is correctly requested when vault-encrypted + files are read. 
(CVE-2024-8775) diff --git a/changelogs/fragments/darwin_facts.yml b/changelogs/fragments/darwin_facts.yml deleted file mode 100644 index bad6d97a3c7..00000000000 --- a/changelogs/fragments/darwin_facts.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: - - Darwin - add unit tests for Darwin hardware fact gathering. diff --git a/changelogs/fragments/debconf_empty_password.yml b/changelogs/fragments/debconf_empty_password.yml new file mode 100644 index 00000000000..473dc53e0d5 --- /dev/null +++ b/changelogs/fragments/debconf_empty_password.yml @@ -0,0 +1,3 @@ +--- +bugfixes: + - debconf - set empty password values (https://github.com/ansible/ansible/issues/83214). diff --git a/changelogs/fragments/deprecated.yml b/changelogs/fragments/deprecated.yml new file mode 100644 index 00000000000..aa632c0487d --- /dev/null +++ b/changelogs/fragments/deprecated.yml @@ -0,0 +1,3 @@ +--- +minor_changes: + - docs - add collection name in message from which the module is being deprecated (https://github.com/ansible/ansible/issues/84116). 
diff --git a/changelogs/fragments/dnf-installroot-substitutions.yml b/changelogs/fragments/dnf-installroot-substitutions.yml deleted file mode 100644 index aef96f4e2b3..00000000000 --- a/changelogs/fragments/dnf-installroot-substitutions.yml +++ /dev/null @@ -1,3 +0,0 @@ -bugfixes: - - dnf - honor installroot for ``cachedir``, ``logdir`` and ``persistdir`` - - dnf - perform variable substitutions in ``logdir`` and ``persistdir`` diff --git a/changelogs/fragments/dnf5-api-breaks.yml b/changelogs/fragments/dnf5-api-breaks.yml deleted file mode 100644 index 99c9ecd6cfc..00000000000 --- a/changelogs/fragments/dnf5-api-breaks.yml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - dnf5 - replace removed API calls diff --git a/changelogs/fragments/dnf5-enable-disable-plugins.yml b/changelogs/fragments/dnf5-enable-disable-plugins.yml deleted file mode 100644 index 5d4eacfbac0..00000000000 --- a/changelogs/fragments/dnf5-enable-disable-plugins.yml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - dnf5 - implement ``enable_plugin`` and ``disable_plugin`` options diff --git a/changelogs/fragments/dnf5-plugins-compat.yml b/changelogs/fragments/dnf5-plugins-compat.yml new file mode 100644 index 00000000000..5d42b0f99f1 --- /dev/null +++ b/changelogs/fragments/dnf5-plugins-compat.yml @@ -0,0 +1,2 @@ +bugfixes: + - "dnf5 - fix traceback when ``enable_plugins``/``disable_plugins`` is used on ``python3-libdnf5`` versions that do not support this functionality" diff --git a/changelogs/fragments/dnf_cache_path.yml b/changelogs/fragments/dnf_cache_path.yml deleted file mode 100644 index 50b72f9a109..00000000000 --- a/changelogs/fragments/dnf_cache_path.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: - - dnf - Substitute variables in DNF cache path (https://github.com/ansible/ansible/pull/80094). 
diff --git a/changelogs/fragments/enabled_runtime.yml b/changelogs/fragments/enabled_runtime.yml deleted file mode 100644 index a3166307747..00000000000 --- a/changelogs/fragments/enabled_runtime.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: - - systemd - extend systemctl is-enabled check to handle "enabled-runtime" (https://github.com/ansible/ansible/pull/77754). diff --git a/changelogs/fragments/file_hardlink.yml b/changelogs/fragments/file_hardlink.yml deleted file mode 100644 index 26c5eeaf7c6..00000000000 --- a/changelogs/fragments/file_hardlink.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: - - file - retrieve the link's full path when hard linking a soft link with follow (https://github.com/ansible/ansible/issues/33911). diff --git a/changelogs/fragments/file_simplify.yml b/changelogs/fragments/file_simplify.yml new file mode 100644 index 00000000000..63e48fbdb9a --- /dev/null +++ b/changelogs/fragments/file_simplify.yml @@ -0,0 +1,3 @@ +--- +minor_changes: + - file - make code more readable and simple. diff --git a/changelogs/fragments/find-checksum.yml b/changelogs/fragments/find-checksum.yml new file mode 100644 index 00000000000..c713beabd68 --- /dev/null +++ b/changelogs/fragments/find-checksum.yml @@ -0,0 +1,2 @@ +minor_changes: + - find - add a checksum_algorithm parameter to specify which type of checksum the module will return diff --git a/changelogs/fragments/fix-ansible-galaxy-ignore-certs.yml b/changelogs/fragments/fix-ansible-galaxy-ignore-certs.yml new file mode 100644 index 00000000000..aba789bdadd --- /dev/null +++ b/changelogs/fragments/fix-ansible-galaxy-ignore-certs.yml @@ -0,0 +1,2 @@ +bugfixes: + - Fix disabling SSL verification when installing collections and roles from git repositories. If ``--ignore-certs`` isn't provided, the value for the ``GALAXY_IGNORE_CERTS`` configuration option will be used (https://github.com/ansible/ansible/issues/83326). 
diff --git a/changelogs/fragments/fix-ipv6-pattern.yml b/changelogs/fragments/fix-ipv6-pattern.yml new file mode 100644 index 00000000000..48b18150527 --- /dev/null +++ b/changelogs/fragments/fix-ipv6-pattern.yml @@ -0,0 +1,2 @@ +bugfixes: + - Fix ipv6 pattern bug in lib/ansible/parsing/utils/addresses.py (https://github.com/ansible/ansible/issues/84237) \ No newline at end of file diff --git a/changelogs/fragments/fix-module-utils-facts-timeout.yml b/changelogs/fragments/fix-module-utils-facts-timeout.yml new file mode 100644 index 00000000000..3ecc95dfab3 --- /dev/null +++ b/changelogs/fragments/fix-module-utils-facts-timeout.yml @@ -0,0 +1,2 @@ +bugfixes: + - Use the requested error message in the ansible.module_utils.facts.timeout timeout function instead of hardcoding one. diff --git a/changelogs/fragments/fix_errors.yml b/changelogs/fragments/fix_errors.yml new file mode 100644 index 00000000000..995cc28ffda --- /dev/null +++ b/changelogs/fragments/fix_errors.yml @@ -0,0 +1,2 @@ +bugfixes: + - Errors now preserve stacked error messages even when YAML is involved. diff --git a/changelogs/fragments/fix_log_verbosity.yml b/changelogs/fragments/fix_log_verbosity.yml deleted file mode 100644 index 72ccb3a5528..00000000000 --- a/changelogs/fragments/fix_log_verbosity.yml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - ensure we have logger before we log when we have increased verbosity. diff --git a/changelogs/fragments/freebsd_disk_regex.yml b/changelogs/fragments/freebsd_disk_regex.yml deleted file mode 100644 index 74930f212c9..00000000000 --- a/changelogs/fragments/freebsd_disk_regex.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: - - freebsd - update disk and slices regex for fact gathering (https://github.com/ansible/ansible/pull/82081). 
diff --git a/changelogs/fragments/freebsd_facts_refactor.yml b/changelogs/fragments/freebsd_facts_refactor.yml deleted file mode 100644 index f0f01354f76..00000000000 --- a/changelogs/fragments/freebsd_facts_refactor.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: - - freebsd - refactor dmidecode fact gathering code for simplicity. diff --git a/changelogs/fragments/freebsd_service.yml b/changelogs/fragments/freebsd_service.yml deleted file mode 100644 index dcf5f8cc005..00000000000 --- a/changelogs/fragments/freebsd_service.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: - - service - fix order of CLI arguments on FreeBSD (https://github.com/ansible/ansible/pull/81377). diff --git a/changelogs/fragments/gather-s390-sysinfo.yml b/changelogs/fragments/gather-s390-sysinfo.yml deleted file mode 100644 index 7a9a60d0ff8..00000000000 --- a/changelogs/fragments/gather-s390-sysinfo.yml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: -- fact gathering - Gather /proc/sysinfo facts on s390 Linux on Z diff --git a/changelogs/fragments/libvirt_lxc.yml b/changelogs/fragments/libvirt_lxc.yml new file mode 100644 index 00000000000..7d575756983 --- /dev/null +++ b/changelogs/fragments/libvirt_lxc.yml @@ -0,0 +1,3 @@ +--- +bugfixes: + - base.yml - deprecated libvirt_lxc_noseclabel config. diff --git a/changelogs/fragments/listify.yml b/changelogs/fragments/listify.yml deleted file mode 100644 index c77458bcc5d..00000000000 --- a/changelogs/fragments/listify.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -removed_features: - - utils/listify - remove deprecated 'loader' argument from listify_lookup_plugin_terms API (https://github.com/ansible/ansible/issues/82949). diff --git a/changelogs/fragments/mask_me.yml b/changelogs/fragments/mask_me.yml deleted file mode 100644 index 57aac99aa9a..00000000000 --- a/changelogs/fragments/mask_me.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: - - systemd_service - handle mask operation failure (https://github.com/ansible/ansible/issues/81649). 
diff --git a/changelogs/fragments/no-return.yml b/changelogs/fragments/no-return.yml new file mode 100644 index 00000000000..b55db43eb2f --- /dev/null +++ b/changelogs/fragments/no-return.yml @@ -0,0 +1,2 @@ +minor_changes: + - module_utils - Add ``NoReturn`` type annotations to functions which never return. diff --git a/changelogs/fragments/os_family.yml b/changelogs/fragments/os_family.yml new file mode 100644 index 00000000000..7126a00c27b --- /dev/null +++ b/changelogs/fragments/os_family.yml @@ -0,0 +1,3 @@ +--- +bugfixes: + - facts - skip if distribution file path is a directory, instead of raising an error (https://github.com/ansible/ansible/issues/84006). diff --git a/changelogs/fragments/package-dnf-action-plugins-facts-fail-msg.yml b/changelogs/fragments/package-dnf-action-plugins-facts-fail-msg.yml new file mode 100644 index 00000000000..8dd037a4e02 --- /dev/null +++ b/changelogs/fragments/package-dnf-action-plugins-facts-fail-msg.yml @@ -0,0 +1,2 @@ +bugfixes: + - "``package``/``dnf`` action plugins - provide the reason behind the failure to gather the ``ansible_pkg_mgr`` fact to identify the package backend" diff --git a/changelogs/fragments/package_facts_aliases.yml b/changelogs/fragments/package_facts_aliases.yml deleted file mode 100644 index 9e408ff6eba..00000000000 --- a/changelogs/fragments/package_facts_aliases.yml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - package_facts module now supports using aliases for supported package managers, for example managers=yum or managers=dnf will resolve to using the underlying rpm. 
diff --git a/changelogs/fragments/package_facts_fix.yml b/changelogs/fragments/package_facts_fix.yml new file mode 100644 index 00000000000..f1ffbf4d641 --- /dev/null +++ b/changelogs/fragments/package_facts_fix.yml @@ -0,0 +1,2 @@ +bugfixes: + - package_facts module when using 'auto' will return the first package manager found that provides an output, instead of just the first one, as this can be foreign and not have any packages. diff --git a/changelogs/fragments/passlib.yml b/changelogs/fragments/passlib.yml new file mode 100644 index 00000000000..b6bf883ae6f --- /dev/null +++ b/changelogs/fragments/passlib.yml @@ -0,0 +1,3 @@ +--- +removed_features: + - encrypt - passing unsupported passlib hashtype now raises AnsibleFilterError. diff --git a/changelogs/fragments/psrp-version-req.yml b/changelogs/fragments/psrp-version-req.yml deleted file mode 100644 index 1a1ccf0661d..00000000000 --- a/changelogs/fragments/psrp-version-req.yml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: -- Add a version ceiling constraint for pypsrp to avoid potential breaking changes in the 1.0.0 release. diff --git a/changelogs/fragments/python-3.13.yml b/changelogs/fragments/python-3.13.yml deleted file mode 100644 index 43a8021a921..00000000000 --- a/changelogs/fragments/python-3.13.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - ansible-test - Add support for Python 3.13. - - Add ``python3.13`` to the default ``INTERPRETER_PYTHON_FALLBACK`` list. 
diff --git a/changelogs/fragments/remove-deprecated-gather-facts-config.yml b/changelogs/fragments/remove-deprecated-gather-facts-config.yml deleted file mode 100644 index 9a7037458af..00000000000 --- a/changelogs/fragments/remove-deprecated-gather-facts-config.yml +++ /dev/null @@ -1,11 +0,0 @@ -bugfixes: -- >- - Remove deprecated config options DEFAULT_FACT_PATH, DEFAULT_GATHER_SUBSET, and - DEFAULT_GATHER_TIMEOUT in favor of setting ``fact_path``, ``gather_subset`` - and ``gather_timeout`` as ``module_defaults`` for ``ansible.builtin.setup``. - - These will apply to both the ``gather_facts`` play keyword, and any - ``ansible.builtin.setup`` tasks. - - To configure these options only for the ``gather_facts`` keyword, set these - options as play keywords also. diff --git a/changelogs/fragments/remove-deprecated-get_delegated_vars.yml b/changelogs/fragments/remove-deprecated-get_delegated_vars.yml deleted file mode 100644 index 8cc0659ee57..00000000000 --- a/changelogs/fragments/remove-deprecated-get_delegated_vars.yml +++ /dev/null @@ -1,2 +0,0 @@ -removed_features: - - Remove deprecated `VariableManager._get_delegated_vars` method (https://github.com/ansible/ansible/issues/82950) diff --git a/changelogs/fragments/remove-deprecated-role-cache.yml b/changelogs/fragments/remove-deprecated-role-cache.yml deleted file mode 100644 index ff8e14fc19a..00000000000 --- a/changelogs/fragments/remove-deprecated-role-cache.yml +++ /dev/null @@ -1,2 +0,0 @@ -removed_features: - - Play - removed deprecated ``ROLE_CACHE`` property in favor of ``role_cache``. diff --git a/changelogs/fragments/remove-deprecated-vars-syntax.yml b/changelogs/fragments/remove-deprecated-vars-syntax.yml deleted file mode 100644 index ec9fb9fd2ac..00000000000 --- a/changelogs/fragments/remove-deprecated-vars-syntax.yml +++ /dev/null @@ -1,2 +0,0 @@ -removed_features: - - Removed support for setting the ``vars`` keyword to lists of dictionaries. It is now required to be a single dictionary. 
diff --git a/changelogs/fragments/remove-python-2-compat.yml b/changelogs/fragments/remove-python-2-compat.yml deleted file mode 100644 index aaf6031b8f7..00000000000 --- a/changelogs/fragments/remove-python-2-compat.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - unarchive - Remove Python 2.7 compatibility imports. - - ansible-test - Remove Python 2.7 compatibility imports. diff --git a/changelogs/fragments/remove_ini_ignored_dir.yml b/changelogs/fragments/remove_ini_ignored_dir.yml new file mode 100644 index 00000000000..10a5a8e61ce --- /dev/null +++ b/changelogs/fragments/remove_ini_ignored_dir.yml @@ -0,0 +1,2 @@ +minor_changes: + - INVENTORY_IGNORE_EXTS config, removed ``ini`` from the default list, inventory scripts using a corresponding .ini configuration are rare now and inventory.ini files are more common. Those that need to ignore the ini files for inventory scripts can still add it to configuration. diff --git a/changelogs/fragments/selector_removal.yml b/changelogs/fragments/selector_removal.yml new file mode 100644 index 00000000000..681686f72e4 --- /dev/null +++ b/changelogs/fragments/selector_removal.yml @@ -0,0 +1,3 @@ +--- +removed_features: + - selector - remove deprecated compat.selector related files (https://github.com/ansible/ansible/pull/84155). diff --git a/changelogs/fragments/service_facts_fbsd.yml b/changelogs/fragments/service_facts_fbsd.yml new file mode 100644 index 00000000000..6f06ab79f23 --- /dev/null +++ b/changelogs/fragments/service_facts_fbsd.yml @@ -0,0 +1,2 @@ +minor_changes: + - service_facts module got freebsd support added. diff --git a/changelogs/fragments/skip-handlers-tagged-play.yml b/changelogs/fragments/skip-handlers-tagged-play.yml new file mode 100644 index 00000000000..755308eafbe --- /dev/null +++ b/changelogs/fragments/skip-handlers-tagged-play.yml @@ -0,0 +1,2 @@ +bugfixes: + - "Do not run implicit ``flush_handlers`` meta tasks when the whole play is excluded from the run due to tags specified." 
diff --git a/changelogs/fragments/skip-implicit-flush_handlers-no-notify.yml b/changelogs/fragments/skip-implicit-flush_handlers-no-notify.yml new file mode 100644 index 00000000000..a4c913791d2 --- /dev/null +++ b/changelogs/fragments/skip-implicit-flush_handlers-no-notify.yml @@ -0,0 +1,2 @@ +bugfixes: + - "Improve performance on large inventories by reducing the number of implicit meta tasks." diff --git a/changelogs/fragments/skip-role-task-iterator.yml b/changelogs/fragments/skip-role-task-iterator.yml new file mode 100644 index 00000000000..1cf6b4cbb84 --- /dev/null +++ b/changelogs/fragments/skip-role-task-iterator.yml @@ -0,0 +1,2 @@ +minor_changes: + - PlayIterator - do not return tasks from already executed roles so specific strategy plugins do not have to do the filtering of such tasks themselves diff --git a/changelogs/fragments/string_conversion.yml b/changelogs/fragments/string_conversion.yml new file mode 100644 index 00000000000..58032896171 --- /dev/null +++ b/changelogs/fragments/string_conversion.yml @@ -0,0 +1,3 @@ +--- +removed_features: + - Removed deprecated STRING_CONVERSION_ACTION (https://github.com/ansible/ansible/issues/84220). diff --git a/changelogs/fragments/sysctl_fact_fix.yml b/changelogs/fragments/sysctl_fact_fix.yml deleted file mode 100644 index 55f51b91428..00000000000 --- a/changelogs/fragments/sysctl_fact_fix.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: - - setup/gather_facts will skip missing ``sysctl`` instead of being a fatal error (https://github.com/ansible/ansible/pull/81297). diff --git a/changelogs/fragments/timeout_show_frame.yml b/changelogs/fragments/timeout_show_frame.yml deleted file mode 100644 index fcdb61beef2..00000000000 --- a/changelogs/fragments/timeout_show_frame.yml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - task timeout now returns timedout key with frame/code that was in execution when the timeout is triggered. 
diff --git a/changelogs/fragments/update-resolvelib-lt-2_0_0.yml b/changelogs/fragments/update-resolvelib-lt-2_0_0.yml new file mode 100644 index 00000000000..10c4f1a0838 --- /dev/null +++ b/changelogs/fragments/update-resolvelib-lt-2_0_0.yml @@ -0,0 +1,2 @@ +minor_changes: + - ansible-galaxy - support ``resolvelib >= 0.5.3, < 2.0.0`` (https://github.com/ansible/ansible/issues/84217). diff --git a/changelogs/fragments/uri_follow_redirect.yml b/changelogs/fragments/uri_follow_redirect.yml deleted file mode 100644 index 1df21a486cb..00000000000 --- a/changelogs/fragments/uri_follow_redirect.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: - - uri - deprecate 'yes' and 'no' value for 'follow_redirects' parameter. diff --git a/changelogs/fragments/user_action_fix.yml b/changelogs/fragments/user_action_fix.yml new file mode 100644 index 00000000000..64ee997d688 --- /dev/null +++ b/changelogs/fragments/user_action_fix.yml @@ -0,0 +1,2 @@ +bugfixes: + - user module now avoids changing ownership of files symlinked in provided home dir skeleton diff --git a/changelogs/fragments/user_ssh_fix.yml b/changelogs/fragments/user_ssh_fix.yml new file mode 100644 index 00000000000..b2c47d60e3a --- /dev/null +++ b/changelogs/fragments/user_ssh_fix.yml @@ -0,0 +1,4 @@ +bugfixes: + - user action will now require O(force) to overwrite the public part of an ssh key when generating ssh keys, as was already the case for the private part. +security_fixes: + - user action won't allow ssh-keygen, chown and chmod to run on existing ssh public key file, avoiding traversal on existing symlinks (CVE-2024-9902). 
diff --git a/changelogs/fragments/v2.18.0-initial-commit.yaml b/changelogs/fragments/v2.19.0-initial-commit.yaml similarity index 100% rename from changelogs/fragments/v2.18.0-initial-commit.yaml rename to changelogs/fragments/v2.19.0-initial-commit.yaml diff --git a/changelogs/fragments/vmware_facts.yml b/changelogs/fragments/vmware_facts.yml deleted file mode 100644 index 257fe90c3fb..00000000000 --- a/changelogs/fragments/vmware_facts.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -bugfixes: - - facts - add a generic detection for VMware in product name. diff --git a/changelogs/fragments/yum_repository.yml b/changelogs/fragments/yum_repository.yml deleted file mode 100644 index 508760614dc..00000000000 --- a/changelogs/fragments/yum_repository.yml +++ /dev/null @@ -1,9 +0,0 @@ -deprecated_features: - - yum_repository - deprecate ``async`` option as it has been removed in RHEL 8 and will be removed in ansible-core 2.22. - - >- - yum_repository - the following options are deprecated: ``deltarpm_metadata_percentage``, ``gpgcakey``, ``http_caching``, - ``keepalive``, ``metadata_expire_filter``, ``mirrorlist_expire``, ``protect``, ``ssl_check_cert_permissions``, - ``ui_repoid_vars`` as they have no effect for dnf as an underlying package manager. - The options will be removed in ansible-core 2.22. -minor_changes: - - yum_repository - add ``excludepkgs`` alias to the ``exclude`` option. diff --git a/hacking/README.md b/hacking/README.md index 51f17202ed5..534a7e4db0e 100644 --- a/hacking/README.md +++ b/hacking/README.md @@ -5,7 +5,7 @@ env-setup --------- The 'env-setup' script modifies your environment to allow you to run -ansible from a git checkout using python >= 3.10. +ansible from a git checkout using python >= 3.11. 
First, set up your environment to run from the checkout: @@ -18,7 +18,7 @@ and do not wish to install them from your operating system package manager, you can install them from pip ```shell -easy_install pip # if pip is not already available +python -Im ensurepip # if pip is not already available pip install -r requirements.txt ``` diff --git a/hacking/backport/backport_of_line_adder.py b/hacking/backport/backport_of_line_adder.py index 9856b9f0e01..70d03efd2c6 100755 --- a/hacking/backport/backport_of_line_adder.py +++ b/hacking/backport/backport_of_line_adder.py @@ -32,14 +32,14 @@ TICKET_NUMBER = re.compile(r'(?:^|\s)#(\d+)') def normalize_pr_url(pr, allow_non_ansible_ansible=False, only_number=False): - ''' + """ Given a PullRequest, or a string containing a PR number, PR URL, or internal PR URL (e.g. ansible-collections/community.general#1234), return either a full github URL to the PR (if only_number is False), or an int containing the PR number (if only_number is True). Throws if it can't parse the input. - ''' + """ if isinstance(pr, PullRequest): return pr.html_url @@ -71,10 +71,10 @@ def normalize_pr_url(pr, allow_non_ansible_ansible=False, only_number=False): def url_to_org_repo(url): - ''' + """ Given a full Github PR URL, extract the user/org and repo name. Return them in the form: "user/repo" - ''' + """ match = PULL_HTTP_URL_RE.match(url) if not match: return '' @@ -82,7 +82,7 @@ def url_to_org_repo(url): def generate_new_body(pr, source_pr): - ''' + """ Given the new PR (the backport) and the originating (source) PR, construct the new body for the backport PR. @@ -93,7 +93,7 @@ def generate_new_body(pr, source_pr): This function does not side-effect, it simply returns the new body as a string. 
- ''' + """ backport_text = '\nBackport of {0}\n'.format(source_pr) body_lines = pr.body.split('\n') new_body_lines = [] @@ -115,10 +115,10 @@ def generate_new_body(pr, source_pr): def get_prs_for_commit(g, commit): - ''' + """ Given a commit hash, attempt to find the hash in any repo in the ansible orgs, and then use it to determine what, if any, PR it appeared in. - ''' + """ commits = g.search_commits( 'hash:{0} org:ansible org:ansible-collections is:public'.format(commit) @@ -132,7 +132,7 @@ def get_prs_for_commit(g, commit): def search_backport(pr, g, ansible_ansible): - ''' + """ Do magic. This is basically the "brain" of 'auto'. It will search the PR (the newest PR - the backport) and try to find where it originated. @@ -148,7 +148,7 @@ def search_backport(pr, g, ansible_ansible): It will take all of the above, and return a list of "possibilities", which is a list of PullRequest objects. - ''' + """ possibilities = [] @@ -198,20 +198,20 @@ def search_backport(pr, g, ansible_ansible): def prompt_add(): - ''' + """ Prompt the user and return whether or not they agree. - ''' + """ res = input('Shall I add the reference? [Y/n]: ') return res.lower() in ('', 'y', 'yes') def commit_edit(new_pr, pr): - ''' + """ Given the new PR (the backport), and the "possibility" that we have decided on, prompt the user and then add the reference to the body of the new PR. This method does the actual "destructive" work of editing the PR body. 
- ''' + """ print('I think this PR might have come from:') print(pr.title) print('-' * 50) diff --git a/hacking/create-bulk-issues.py b/hacking/create-bulk-issues.py index d2651415df1..09c79590e22 100755 --- a/hacking/create-bulk-issues.py +++ b/hacking/create-bulk-issues.py @@ -35,6 +35,7 @@ class Issue: body: str project: str labels: list[str] | None = None + assignee: str | None = None def create(self) -> str: cmd = ['gh', 'issue', 'create', '--title', self.title, '--body', self.body, '--project', self.project] @@ -43,8 +44,18 @@ class Issue: for label in self.labels: cmd.extend(('--label', label)) - process = subprocess.run(cmd, capture_output=True, check=True) - url = process.stdout.decode().strip() + if self.assignee: + cmd.extend(('--assignee', self.assignee)) + + try: + process = subprocess.run(cmd, capture_output=True, check=True, text=True) + except subprocess.CalledProcessError as ex: + print('>>> Note') + print(f"You may need to run 'gh auth refresh -s project' if 'gh' reports it cannot find the project {self.project!r} when it exists.") + print(f'>>> Standard Output\n{ex.stdout.strip()}\n>>> Standard Error\n{ex.stderr.strip()}\n>>> Exception') + raise + + url = process.stdout.strip() return url @@ -54,6 +65,7 @@ class Feature: summary: str component: str labels: list[str] | None = None + assignee: str | None = None @staticmethod def from_dict(data: dict[str, t.Any]) -> Feature: @@ -61,6 +73,7 @@ class Feature: summary = data.get('summary') component = data.get('component') labels = data.get('labels') + assignee = data.get('assignee') if not isinstance(title, str): raise RuntimeError(f'`title` is not `str`: {title}') @@ -71,6 +84,9 @@ class Feature: if not isinstance(component, str): raise RuntimeError(f'`component` is not `str`: {component}') + if not isinstance(assignee, (str, type(None))): + raise RuntimeError(f'`assignee` is not `str`: {assignee}') + if not isinstance(labels, list) or not all(isinstance(item, str) for item in labels): raise 
RuntimeError(f'`labels` is not `list[str]`: {labels}') @@ -79,6 +95,7 @@ class Feature: summary=summary, component=component, labels=labels, + assignee=assignee, ) def create_issue(self, project: str) -> Issue: @@ -102,6 +119,7 @@ Feature Idea body=body.strip(), project=project, labels=self.labels, + assignee=self.assignee, ) @@ -297,7 +315,21 @@ def create_deprecation_parser(subparser) -> None: def create_feature_parser(subparser) -> None: - parser: argparse.ArgumentParser = subparser.add_parser('feature') + epilog = """ +Example source YAML: + +default: + component: ansible-test + labels: + - ansible-test + - feature + assignee: "@me" +features: + - title: Some title goes here + summary: A summary goes here. +""" + + parser: argparse.ArgumentParser = subparser.add_parser('feature', epilog=epilog, formatter_class=argparse.RawDescriptionHelpFormatter) parser.set_defaults(type=FeatureArgs) parser.set_defaults(command=feature_command) diff --git a/hacking/env-setup b/hacking/env-setup index 0a86e0fe4fb..df1ea4020f2 100644 --- a/hacking/env-setup +++ b/hacking/env-setup @@ -57,22 +57,6 @@ expr "$PYTHONPATH" : "${ANSIBLE_TEST_PREFIX_PYTHONPATH}.*" > /dev/null || prepen expr "$PATH" : "${PREFIX_PATH}.*" > /dev/null || prepend_path PATH "$PREFIX_PATH" expr "$MANPATH" : "${PREFIX_MANPATH}.*" > /dev/null || prepend_path MANPATH "$PREFIX_MANPATH" -# -# Generate egg_info so that pkg_resources works -# - -# Do the work in a function so we don't repeat ourselves later -gen_egg_info() -{ - # check for current and past egg-info directory names - if ls "$PREFIX_PYTHONPATH"/ansible*.egg-info >/dev/null 2>&1; then - # bypass shell aliases with leading backslash - # see https://github.com/ansible/ansible/pull/11967 - \rm -rf "$PREFIX_PYTHONPATH"/ansible*.egg-info - fi - "$PYTHON_BIN" setup.py egg_info -} - if [ "$ANSIBLE_DEV_HOME" != "$PWD" ] ; then current_dir="$PWD" else @@ -81,10 +65,8 @@ fi ( cd "$ANSIBLE_DEV_HOME" if [ "$verbosity" = silent ] ; then - gen_egg_info > /dev/null 
2>&1 & find . -type f -name "*.pyc" -exec rm -f {} \; > /dev/null 2>&1 else - gen_egg_info find . -type f -name "*.pyc" -exec rm -f {} \; fi cd "$current_dir" diff --git a/hacking/env-setup.fish b/hacking/env-setup.fish index 529b57333ef..fcb739bf0cd 100644 --- a/hacking/env-setup.fish +++ b/hacking/env-setup.fish @@ -3,9 +3,23 @@ # Description: Modifies the environment for running Ansible from a checkout # Usage: . ./hacking/env-setup [-q] +# Set PYTHON_BIN +if not set -q PYTHON_BIN + for exe in python3 python + if command -v $exe > /dev/null + set -gx PYTHON_BIN (command -v $exe) + break + end + end + if not set -q PYTHON_BIN + echo "No valid Python found" + exit 1 + end +end + # Retrieve the path of the current directory where the script resides set HACKING_DIR (dirname (status -f)) -set FULL_PATH (python -c "import os; print(os.path.realpath('$HACKING_DIR'))") +set FULL_PATH ($PYTHON_BIN -c "import os; print(os.path.realpath('$HACKING_DIR'))") set ANSIBLE_HOME (dirname $FULL_PATH) # Set quiet flag @@ -50,39 +64,11 @@ else if not string match -qr $PREFIX_MANPATH'($|:)' $MANPATH set -gx MANPATH "$PREFIX_MANPATH:$MANPATH" end -# Set PYTHON_BIN -if not set -q PYTHON_BIN - for exe in python3 python - if command -v $exe > /dev/null - set -gx PYTHON_BIN (command -v $exe) - break - end - end - if not set -q PYTHON_BIN - echo "No valid Python found" - exit 1 - end -end - -# Generate egg_info so that pkg_resources works -function gen_egg_info - # Check if ansible*.egg-info directory exists and remove if found - if test -d $PREFIX_PYTHONPATH/ansible*.egg-info - rm -rf $PREFIX_PYTHONPATH/ansible*.egg-info - end - # Execute setup.py egg_info using the chosen Python interpreter - eval $PYTHON_BIN setup.py egg_info -end - pushd $ANSIBLE_HOME if test -n "$QUIET" - # Run gen_egg_info in the background and redirect output to /dev/null - gen_egg_info &> /dev/null # Remove any .pyc files found find . 
-type f -name "*.pyc" -exec rm -f '{}' ';' &> /dev/null else - # Run gen_egg_info - gen_egg_info # Remove any .pyc files found find . -type f -name "*.pyc" -exec rm -f '{}' ';' # Display setup details diff --git a/hacking/test-module.py b/hacking/test-module.py index 7b39798de5b..a9df1a79b8f 100755 --- a/hacking/test-module.py +++ b/hacking/test-module.py @@ -38,6 +38,8 @@ import sys import traceback import shutil +from pathlib import Path + from ansible.release import __version__ import ansible.utils.vars as utils_vars from ansible.parsing.dataloader import DataLoader @@ -89,13 +91,11 @@ def parse(): def write_argsfile(argstring, json=False): """ Write args to a file for old-style module's use. """ - argspath = os.path.expanduser("~/.ansible_test_module_arguments") - argsfile = open(argspath, 'w') + argspath = Path("~/.ansible_test_module_arguments").expanduser() if json: args = parse_kv(argstring) argstring = jsonify(args) - argsfile.write(argstring) - argsfile.close() + argspath.write_text(argstring) return argspath @@ -169,9 +169,8 @@ def boilerplate_module(modfile, args, interpreters, check, destfile): print("* including generated source, if any, saving to: %s" % modfile2_path) if module_style not in ('ansiballz', 'old'): print("* this may offset any line numbers in tracebacks/debuggers!") - modfile2 = open(modfile2_path, 'wb') - modfile2.write(module_data) - modfile2.close() + with open(modfile2_path, 'wb') as modfile2: + modfile2.write(module_data) modfile = modfile2_path return (modfile2_path, modname, module_style) diff --git a/hacking/ticket_stubs/bug_wrong_repo.md b/hacking/ticket_stubs/bug_wrong_repo.md index 704ab5ed547..ed115232a20 100644 --- a/hacking/ticket_stubs/bug_wrong_repo.md +++ b/hacking/ticket_stubs/bug_wrong_repo.md @@ -8,7 +8,7 @@ This appears to be something that should be filed against another project or bug << CHOOSE AS APPROPRIATE >> -* +* * * * @@ -16,16 +16,15 @@ This appears to be something that should be filed against another 
project or bug * * * -* For AAP or Tower licensees report issues via your Red Hat representative or +* For AAP Customer issues please see If you can stop by the tracker or forum for one of those projects, we'd appreciate it. Because this project is very active, we're unlikely to see comments made on closed tickets and we lock them after some time. Should you still wish to discuss things further, or if you disagree with our thought process, please stop by one of our two mailing lists: -* +* [ansible-core on the Ansible Forum](https://forum.ansible.com/tag/ansible-core) * Matrix: [#devel:ansible.im](https://matrix.to/#/#devel:ansible.im) -* IRC: #ansible-devel on [irc.libera.chat](https://libera.chat/) We'd be happy to discuss things. diff --git a/hacking/ticket_stubs/proposal.md b/hacking/ticket_stubs/proposal.md index 4b672856f36..2d8182f12be 100644 --- a/hacking/ticket_stubs/proposal.md +++ b/hacking/ticket_stubs/proposal.md @@ -6,10 +6,9 @@ If you are still interested in seeing this new feature get into Ansible, please Because this project is very active, we're unlikely to see comments made on closed tickets and we lock them after some time. -The mailing list and irc are great ways to ask questions, or post if you don't think this particular issue is resolved. +The Forum is the best way to ask questions, or post if you don't think this particular issue is resolved. -* #ansible-devel on [irc.libera.chat](https://libera.chat/) -* +* Or check this page for a more complete list of communication channels and their purposes: diff --git a/hacking/ticket_stubs/question_not_bug.md b/hacking/ticket_stubs/question_not_bug.md index b53b367464c..dab0d2edba1 100644 --- a/hacking/ticket_stubs/question_not_bug.md +++ b/hacking/ticket_stubs/question_not_bug.md @@ -2,12 +2,11 @@ Hi! Thanks very much for your interest in Ansible. It means a lot to us. -This appears to be a user question, and we'd like to direct these kinds of things to either the mailing list or the IRC channel. 
+This appears to be a user question, and we'd like to direct these topics to the Ansible Forum. -* IRC: #ansible on [irc.libera.chat](https://libera.chat/) -* mailing list: +* [Ansible Forum](https://forum.ansible.com) -See this page for a complete and up to date list of communication channels and their purposes: +See this page for a complete and up to date list of communication channels and their purposes: * diff --git a/hacking/ticket_stubs/wider_discussion.md b/hacking/ticket_stubs/wider_discussion.md index e8b13b34924..3ab9073f443 100644 --- a/hacking/ticket_stubs/wider_discussion.md +++ b/hacking/ticket_stubs/wider_discussion.md @@ -8,11 +8,10 @@ Reasons for this include: * INSERT REASONS! Because this project is very active, we're unlikely to see comments made on closed tickets and we lock them after some time. -Can you please post on ansible-development list so we can talk about this idea with the wider group? +Can you please post on the Ansible Forum so we can talk about this idea with the wider group? 
-* +* [Ansible Core on the Ansible Forum](https://forum.ansible.com/tag/ansible-core) * Matrix: [#devel:ansible.im](https://matrix.to/#/#devel:ansible.im) -* #ansible-devel on [irc.libera.chat](https://libera.chat/) For other alternatives, check this page for a more complete list of communication channels and their purposes: diff --git a/hacking/update-sanity-requirements.py b/hacking/update-sanity-requirements.py index 997d6dbf87a..aaaa803cde8 100755 --- a/hacking/update-sanity-requirements.py +++ b/hacking/update-sanity-requirements.py @@ -52,7 +52,6 @@ class SanityTest: if pip_freeze.stdout: raise Exception(f'Initial virtual environment is not empty:\n{pip_freeze.stdout}') - subprocess.run(pip + ['install', 'wheel'], env=env, check=True) # make bdist_wheel available during pip install subprocess.run(pip + ['install', '-r', self.source_path], env=env, check=True) freeze_options = ['--all'] diff --git a/lib/ansible/__main__.py b/lib/ansible/__main__.py index cb7006285b4..afdd2849739 100644 --- a/lib/ansible/__main__.py +++ b/lib/ansible/__main__.py @@ -3,9 +3,6 @@ from __future__ import annotations import argparse -import importlib -import os -import sys from importlib.metadata import distribution @@ -19,22 +16,10 @@ def main(): ep_map = {_short_name(ep.name): ep for ep in dist.entry_points if ep.group == 'console_scripts'} parser = argparse.ArgumentParser(prog='python -m ansible', add_help=False) - parser.add_argument('entry_point', choices=list(ep_map) + ['test']) + parser.add_argument('entry_point', choices=list(ep_map)) args, extra = parser.parse_known_args() - if args.entry_point == 'test': - ansible_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - source_root = os.path.join(ansible_root, 'test', 'lib') - - if os.path.exists(os.path.join(source_root, 'ansible_test', '_internal', '__init__.py')): - # running from source, use that version of ansible-test instead of any version that may already be installed - sys.path.insert(0, source_root) - 
- module = importlib.import_module('ansible_test._util.target.cli.ansible_test_cli_stub') - main = module.main - else: - main = ep_map[args.entry_point].load() - + main = ep_map[args.entry_point].load() main([args.entry_point] + extra) diff --git a/lib/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py index b8da2dbd50f..03a2b3e854a 100644 --- a/lib/ansible/cli/__init__.py +++ b/lib/ansible/cli/__init__.py @@ -11,9 +11,9 @@ import sys # Used for determining if the system is running a new enough python version # and should only restrict on our documented minimum versions -if sys.version_info < (3, 10): +if sys.version_info < (3, 11): raise SystemExit( - 'ERROR: Ansible requires Python 3.10 or newer on the controller. ' + 'ERROR: Ansible requires Python 3.11 or newer on the controller. ' 'Current version: %s' % ''.join(sys.version.splitlines()) ) @@ -116,7 +116,7 @@ except ImportError: class CLI(ABC): - ''' code behind bin/ansible* programs ''' + """ code behind bin/ansible* programs """ PAGER = C.config.get_config_value('PAGER') @@ -167,19 +167,7 @@ class CLI(ABC): else: display.v(u"No config file found; using defaults") - # warn about deprecated config options - for deprecated in C.config.DEPRECATED: - name = deprecated[0] - why = deprecated[1]['why'] - if 'alternatives' in deprecated[1]: - alt = ', use %s instead' % deprecated[1]['alternatives'] - else: - alt = '' - ver = deprecated[1].get('version') - date = deprecated[1].get('date') - collection_name = deprecated[1].get('collection_name') - display.deprecated("%s option, %s%s" % (name, why, alt), - version=ver, date=date, collection_name=collection_name) + C.handle_config_noise(display) @staticmethod def split_vault_id(vault_id): @@ -329,7 +317,7 @@ class CLI(ABC): @staticmethod def ask_passwords(): - ''' prompt for connection and become passwords if needed ''' + """ prompt for connection and become passwords if needed """ op = context.CLIARGS sshpass = None @@ -359,7 +347,7 @@ class CLI(ABC): return 
(sshpass, becomepass) def validate_conflicts(self, op, runas_opts=False, fork_opts=False): - ''' check for conflicting options ''' + """ check for conflicting options """ if fork_opts: if op.forks < 1: @@ -471,7 +459,7 @@ class CLI(ABC): @staticmethod def version_info(gitinfo=False): - ''' return full ansible version info ''' + """ return full ansible version info """ if gitinfo: # expensive call, user with care ansible_version_string = opt_help.version() @@ -497,7 +485,7 @@ class CLI(ABC): @staticmethod def pager(text): - ''' find reasonable way to display text ''' + """ find reasonable way to display text """ # this is a much simpler form of what is in pydoc.py if not sys.stdout.isatty(): display.display(text, screen_only=True) @@ -516,7 +504,7 @@ class CLI(ABC): @staticmethod def pager_pipe(text): - ''' pipe text through a pager ''' + """ pipe text through a pager """ if 'less' in CLI.PAGER: os.environ['LESS'] = CLI.LESS_OPTS try: @@ -566,8 +554,19 @@ class CLI(ABC): # the code, ensuring a consistent view of global variables variable_manager = VariableManager(loader=loader, inventory=inventory, version_info=CLI.version_info(gitinfo=False)) + # flush fact cache if requested + if options['flush_cache']: + CLI._flush_cache(inventory, variable_manager) + return loader, inventory, variable_manager + @staticmethod + def _flush_cache(inventory, variable_manager): + variable_manager.clear_facts('localhost') + for host in inventory.list_hosts(): + hostname = host.get_name() + variable_manager.clear_facts(hostname) + @staticmethod def get_host_list(inventory, subset, pattern='all'): @@ -615,9 +614,8 @@ class CLI(ABC): else: try: - f = open(b_pwd_file, "rb") - secret = f.read().strip() - f.close() + with open(b_pwd_file, "rb") as f: + secret = f.read().strip() except (OSError, IOError) as e: raise AnsibleError("Could not read password file %s: %s" % (pwd_file, e)) diff --git a/lib/ansible/cli/adhoc.py b/lib/ansible/cli/adhoc.py index efe99b99105..830e5823cfd 100755 --- 
a/lib/ansible/cli/adhoc.py +++ b/lib/ansible/cli/adhoc.py @@ -24,14 +24,14 @@ display = Display() class AdHocCLI(CLI): - ''' is an extra-simple tool/framework/API for doing 'remote things'. + """ is an extra-simple tool/framework/API for doing 'remote things'. this command allows you to define and run a single task 'playbook' against a set of hosts - ''' + """ name = 'ansible' def init_parser(self): - ''' create an options parser for bin/ansible ''' + """ create an options parser for bin/ansible """ super(AdHocCLI, self).init_parser(usage='%prog [options]', desc="Define and run a single task 'playbook' against a set of hosts", epilog="Some actions do not make sense in Ad-Hoc (include, meta, etc)") @@ -60,7 +60,7 @@ class AdHocCLI(CLI): self.parser.add_argument('args', metavar='pattern', help='host pattern') def post_process_args(self, options): - '''Post process and validate options for bin/ansible ''' + """Post process and validate options for bin/ansible """ options = super(AdHocCLI, self).post_process_args(options) @@ -98,7 +98,7 @@ class AdHocCLI(CLI): tasks=[mytask]) def run(self): - ''' create and execute the single task playbook ''' + """ create and execute the single task playbook """ super(AdHocCLI, self).run() diff --git a/lib/ansible/cli/arguments/option_helpers.py b/lib/ansible/cli/arguments/option_helpers.py index daa7a9a9b2f..18adc16455a 100644 --- a/lib/ansible/cli/arguments/option_helpers.py +++ b/lib/ansible/cli/arguments/option_helpers.py @@ -297,14 +297,14 @@ def add_inventory_options(parser): help='outputs a list of matching hosts; does not execute anything else') parser.add_argument('-l', '--limit', default=C.DEFAULT_SUBSET, dest='subset', help='further limit selected hosts to an additional pattern') + parser.add_argument('--flush-cache', dest='flush_cache', action='store_true', + help="clear the fact cache for every host in inventory") def add_meta_options(parser): """Add options for commands which can launch meta tasks from the command 
line""" parser.add_argument('--force-handlers', default=C.DEFAULT_FORCE_HANDLERS, dest='force_handlers', action='store_true', help="run handlers even if a task fails") - parser.add_argument('--flush-cache', dest='flush_cache', action='store_true', - help="clear the fact cache for every host in inventory") def add_module_options(parser): diff --git a/lib/ansible/cli/config.py b/lib/ansible/cli/config.py index e17a26f369d..cd801212fca 100755 --- a/lib/ansible/cli/config.py +++ b/lib/ansible/cli/config.py @@ -47,14 +47,14 @@ def yaml_short(data): def get_constants(): - ''' helper method to ensure we can template based on existing constants ''' + """ helper method to ensure we can template based on existing constants """ if not hasattr(get_constants, 'cvars'): get_constants.cvars = {k: getattr(C, k) for k in dir(C) if not k.startswith('__')} return get_constants.cvars def _ansible_env_vars(varname): - ''' return true or false depending if variable name is possibly a 'configurable' ansible env variable ''' + """ return true or false depending if variable name is possibly a 'configurable' ansible env variable """ return all( [ varname.startswith("ANSIBLE_"), @@ -188,9 +188,9 @@ class ConfigCLI(CLI): context.CLIARGS['func']() def execute_update(self): - ''' + """ Updates a single setting in the specified ansible.cfg - ''' + """ raise AnsibleError("Option not implemented yet") # pylint: disable=unreachable @@ -212,9 +212,9 @@ class ConfigCLI(CLI): ]) def execute_view(self): - ''' + """ Displays the current config file - ''' + """ try: with open(self.config_file, 'rb') as f: self.pager(to_text(f.read(), errors='surrogate_or_strict')) @@ -222,9 +222,9 @@ class ConfigCLI(CLI): raise AnsibleError("Failed to open config file: %s" % to_native(e)) def execute_edit(self): - ''' + """ Opens ansible.cfg in the default EDITOR - ''' + """ raise AnsibleError("Option not implemented yet") # pylint: disable=unreachable @@ -266,9 +266,9 @@ class ConfigCLI(CLI): return entries def 
_list_entries_from_args(self): - ''' + """ build a dict with the list requested configs - ''' + """ config_entries = {} if context.CLIARGS['type'] in ('base', 'all'): @@ -294,9 +294,9 @@ class ConfigCLI(CLI): return config_entries def execute_list(self): - ''' + """ list and output available configs - ''' + """ config_entries = self._list_entries_from_args() if context.CLIARGS['format'] == 'yaml': @@ -506,7 +506,7 @@ class ConfigCLI(CLI): # prep loading loader = getattr(plugin_loader, '%s_loader' % ptype) - # acumulators + # accumulators output = [] config_entries = {} @@ -523,7 +523,7 @@ class ConfigCLI(CLI): plugin_cs = loader.all(class_only=True) for plugin in plugin_cs: - # in case of deprecastion they diverge + # in case of deprecation they diverge finalname = name = plugin._load_name if name.startswith('_'): if os.path.islink(plugin._original_path): @@ -599,9 +599,9 @@ class ConfigCLI(CLI): return output def execute_dump(self): - ''' + """ Shows the current settings, merges ansible.cfg if specified - ''' + """ output = [] if context.CLIARGS['type'] in ('base', 'all'): # deal with base diff --git a/lib/ansible/cli/console.py b/lib/ansible/cli/console.py index 5805b97fce8..6f355938aa5 100755 --- a/lib/ansible/cli/console.py +++ b/lib/ansible/cli/console.py @@ -35,7 +35,7 @@ display = Display() class ConsoleCLI(CLI, cmd.Cmd): - ''' + """ A REPL that allows for running ad-hoc tasks against a chosen inventory from a nice shell with built-in tab completion (based on dominis' ``ansible-shell``). 
@@ -62,7 +62,7 @@ class ConsoleCLI(CLI, cmd.Cmd): - ``help [command/module]``: display documentation for the command or module - ``exit``: exit ``ansible-console`` - ''' + """ name = 'ansible-console' modules = [] # type: list[str] | None @@ -545,7 +545,7 @@ class ConsoleCLI(CLI, cmd.Cmd): if path: module_loader.add_directory(path) - # dynamically add 'cannonical' modules as commands, aliases coudld be used and dynamically loaded + # dynamically add 'canonical' modules as commands, aliases could be used and dynamically loaded self.modules = self.list_modules() for module in self.modules: setattr(self, 'do_' + module, lambda arg, module=module: self.default(module + ' ' + arg)) @@ -579,7 +579,7 @@ class ConsoleCLI(CLI, cmd.Cmd): self.cmdloop() def __getattr__(self, name): - ''' handle not found to populate dynamically a module function if module matching name exists ''' + """ handle not found to populate dynamically a module function if module matching name exists """ attr = None if name.startswith('do_'): diff --git a/lib/ansible/cli/doc.py b/lib/ansible/cli/doc.py index 44fe39a597f..52ec8a6c7b1 100755 --- a/lib/ansible/cli/doc.py +++ b/lib/ansible/cli/doc.py @@ -50,7 +50,7 @@ PB_OBJECTS = ['Play', 'Role', 'Block', 'Task', 'Handler'] PB_LOADED = {} SNIPPETS = ['inventory', 'lookup', 'module'] -# harcoded from ascii values +# hardcoded from ascii values STYLE = { 'BLINK': '\033[5m', 'BOLD': '\033[1m', @@ -387,6 +387,12 @@ class RoleMixin(object): for role, collection, role_path in (roles | collroles): argspec = self._load_argspec(role, role_path, collection) + if 'error' in argspec: + if fail_on_errors: + raise argspec['exception'] + else: + display.warning('Skipping role (%s) due to: %s' % (role, argspec['error']), True) + continue fqcn, doc = self._build_doc(role, role_path, collection, argspec, entry_point) if doc: result[fqcn] = doc @@ -403,7 +409,7 @@ def _doclink(url): def _format(string, *args): - ''' add ascii formatting or delimiters ''' + """ add ascii 
formatting or delimiters """ for style in args: @@ -427,10 +433,10 @@ def _format(string, *args): class DocCLI(CLI, RoleMixin): - ''' displays information on modules installed in Ansible libraries. + """ displays information on modules installed in Ansible libraries. It displays a terse listing of plugins and their short descriptions, provides a printout of their DOCUMENTATION strings, - and it can create a short "snippet" which can be pasted into a playbook. ''' + and it can create a short "snippet" which can be pasted into a playbook. """ name = 'ansible-doc' @@ -844,14 +850,14 @@ class DocCLI(CLI, RoleMixin): return plugin_docs def _get_roles_path(self): - ''' + """ Add any 'roles' subdir in playbook dir to the roles search path. And as a last resort, add the playbook dir itself. Order being: - 'roles' subdir of playbook dir - DEFAULT_ROLES_PATH (default in cliargs) - playbook dir (basedir) NOTE: This matches logic in RoleDefinition._load_role_path() method. - ''' + """ roles_path = context.CLIARGS['roles_path'] if context.CLIARGS['basedir'] is not None: subdir = os.path.join(context.CLIARGS['basedir'], "roles") @@ -862,7 +868,7 @@ class DocCLI(CLI, RoleMixin): @staticmethod def _prep_loader(plugin_type): - ''' return a plugint type specific loader ''' + """ return a plugint type specific loader """ loader = getattr(plugin_loader, '%s_loader' % plugin_type) # add to plugin paths from command line @@ -887,6 +893,7 @@ class DocCLI(CLI, RoleMixin): plugin_type = context.CLIARGS['type'].lower() do_json = context.CLIARGS['json_format'] or context.CLIARGS['dump'] listing = context.CLIARGS['list_files'] or context.CLIARGS['list_dir'] + no_fail = bool(not context.CLIARGS['no_fail_on_errors']) if context.CLIARGS['list_files']: content = 'files' @@ -909,7 +916,6 @@ class DocCLI(CLI, RoleMixin): docs['all'] = {} for ptype in ptypes: - no_fail = bool(not context.CLIARGS['no_fail_on_errors']) if ptype == 'role': roles = self._create_role_list(fail_on_errors=no_fail) 
docs['all'][ptype] = self._create_role_doc(roles.keys(), context.CLIARGS['entry_point'], fail_on_errors=no_fail) @@ -935,7 +941,7 @@ class DocCLI(CLI, RoleMixin): if plugin_type == 'keyword': docs = DocCLI._get_keywords_docs(context.CLIARGS['args']) elif plugin_type == 'role': - docs = self._create_role_doc(context.CLIARGS['args'], context.CLIARGS['entry_point']) + docs = self._create_role_doc(context.CLIARGS['args'], context.CLIARGS['entry_point'], fail_on_errors=no_fail) else: # display specific plugin docs docs = self._get_plugins_docs(plugin_type, context.CLIARGS['args']) @@ -1052,7 +1058,7 @@ class DocCLI(CLI, RoleMixin): @staticmethod def format_snippet(plugin, plugin_type, doc): - ''' return heavily commented plugin use to insert into play ''' + """ return heavily commented plugin use to insert into play """ if plugin_type == 'inventory' and doc.get('options', {}).get('plugin'): # these do not take a yaml config that we can write a snippet for raise ValueError('The {0} inventory plugin does not take YAML type config source' @@ -1089,7 +1095,7 @@ class DocCLI(CLI, RoleMixin): text = DocCLI.get_man_text(doc, collection_name, plugin_type) except Exception as e: display.vvv(traceback.format_exc()) - raise AnsibleError("Unable to retrieve documentation from '%s' due to: %s" % (plugin, to_native(e)), orig_exc=e) + raise AnsibleError("Unable to retrieve documentation from '%s'" % (plugin), orig_exc=e) return text @@ -1134,7 +1140,7 @@ class DocCLI(CLI, RoleMixin): @staticmethod def print_paths(finder): - ''' Returns a string suitable for printing of the search path ''' + """ Returns a string suitable for printing of the search path """ # Uses a list to get the order right ret = [] @@ -1195,7 +1201,7 @@ class DocCLI(CLI, RoleMixin): opt_leadin = "-" key = "%s%s %s" % (base_indent, opt_leadin, _format(o, 'yellow')) - # description is specifically formated and can either be string or list of strings + # description is specifically formatted and can either be string or 
list of strings if 'description' not in opt: raise AnsibleError("All (sub-)options and return values must have a 'description' field") text.append('') @@ -1274,7 +1280,7 @@ class DocCLI(CLI, RoleMixin): DocCLI.add_fields(text, subdata, limit, opt_indent + ' ', return_values, opt_indent) def get_role_man_text(self, role, role_json): - '''Generate text for the supplied role suitable for display. + """Generate text for the supplied role suitable for display. This is similar to get_man_text(), but roles are different enough that we have a separate method for formatting their display. @@ -1283,7 +1289,7 @@ class DocCLI(CLI, RoleMixin): :param role_json: The JSON for the given role as returned from _create_role_doc(). :returns: A array of text suitable for displaying to screen. - ''' + """ text = [] opt_indent = " " pad = display.columns * 0.20 @@ -1387,16 +1393,15 @@ class DocCLI(CLI, RoleMixin): if doc.get('deprecated', False): text.append(_format("DEPRECATED: ", 'bold', 'DEP')) if isinstance(doc['deprecated'], dict): - if 'removed_at_date' in doc['deprecated']: - text.append( - "\tReason: %(why)s\n\tWill be removed in a release after %(removed_at_date)s\n\tAlternatives: %(alternative)s" % doc.pop('deprecated') - ) - else: - if 'version' in doc['deprecated'] and 'removed_in' not in doc['deprecated']: - doc['deprecated']['removed_in'] = doc['deprecated']['version'] - text.append("\tReason: %(why)s\n\tWill be removed in: Ansible %(removed_in)s\n\tAlternatives: %(alternative)s" % doc.pop('deprecated')) + if 'removed_at_date' not in doc['deprecated'] and 'version' in doc['deprecated'] and 'removed_in' not in doc['deprecated']: + doc['deprecated']['removed_in'] = doc['deprecated']['version'] + try: + text.append('\t' + C.config.get_deprecated_msg_from_config(doc['deprecated'], True, collection_name=collection_name)) + except KeyError as e: + raise AnsibleError("Invalid deprecation documentation structure", orig_exc=e) else: - text.append("%s" % doc.pop('deprecated')) + 
text.append("%s" % doc['deprecated']) + del doc['deprecated'] if doc.pop('has_action', False): text.append("") diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py index 6ea3f708eec..5e2bef6f151 100755 --- a/lib/ansible/cli/galaxy.py +++ b/lib/ansible/cli/galaxy.py @@ -177,11 +177,11 @@ class RoleDistributionServer: class GalaxyCLI(CLI): - '''Command to manage Ansible roles and collections. + """Command to manage Ansible roles and collections. None of the CLI tools are designed to run concurrently with themselves. Use an external scheduler and/or locking to ensure there are no clashing operations. - ''' + """ name = 'ansible-galaxy' @@ -212,7 +212,7 @@ class GalaxyCLI(CLI): super(GalaxyCLI, self).__init__(args) def init_parser(self): - ''' create an options parser for bin/ansible ''' + """ create an options parser for bin/ansible """ super(GalaxyCLI, self).init_parser( desc="Perform various Role and Collection related operations.", @@ -468,12 +468,31 @@ class GalaxyCLI(CLI): ignore_errors_help = 'Ignore errors during installation and continue with the next specified ' \ 'collection. This will not ignore dependency conflict errors.' else: - args_kwargs['help'] = 'Role name, URL or tar file' + args_kwargs['help'] = 'Role name, URL or tar file. This is mutually exclusive with -r.' ignore_errors_help = 'Ignore errors and continue with the next specified role.' + if self._implicit_role: + # might install both roles and collections + description_text = ( + 'Install roles and collections from file(s), URL(s) or Ansible ' + 'Galaxy to the first entry in the config COLLECTIONS_PATH for collections ' + 'and first entry in the config ROLES_PATH for roles. ' + 'The first entry in the config ROLES_PATH can be overridden by --roles-path ' + 'or -p, but this will result in only roles being installed.' 
+ ) + prog = 'ansible-galaxy install' + else: + prog = f"ansible-galaxy {galaxy_type} install" + description_text = ( + 'Install {0}(s) from file(s), URL(s) or Ansible ' + 'Galaxy to the first entry in the config {1}S_PATH ' + 'unless overridden by --{0}s-path.'.format(galaxy_type, galaxy_type.upper()) + ) install_parser = parser.add_parser('install', parents=parents, help='Install {0}(s) from file(s), URL(s) or Ansible ' - 'Galaxy'.format(galaxy_type)) + 'Galaxy'.format(galaxy_type), + description=description_text, + prog=prog,) install_parser.set_defaults(func=self.execute_install) install_parser.add_argument('args', metavar='{0}_name'.format(galaxy_type), nargs='*', **args_kwargs) @@ -526,8 +545,12 @@ class GalaxyCLI(CLI): 'This does not apply to collections in remote Git repositories or URLs to remote tarballs.' ) else: - install_parser.add_argument('-r', '--role-file', dest='requirements', - help='A file containing a list of roles to be installed.') + if self._implicit_role: + install_parser.add_argument('-r', '--role-file', dest='requirements', + help='A file containing a list of collections and roles to be installed.') + else: + install_parser.add_argument('-r', '--role-file', dest='requirements', + help='A file containing a list of roles to be installed.') r_re = re.compile(r'^(? [options] []', @@ -140,7 +140,7 @@ class PullCLI(CLI): if options.sleep: try: - secs = random.randint(0, int(options.sleep)) + secs = secrets.randbelow(int(options.sleep)) options.sleep = secs except ValueError: raise AnsibleOptionsError("%s is not a number." 
% options.sleep) @@ -157,7 +157,7 @@ class PullCLI(CLI): return options def run(self): - ''' use Runner lib to do SSH things ''' + """ use Runner lib to do SSH things """ super(PullCLI, self).run() @@ -298,6 +298,9 @@ class PullCLI(CLI): if context.CLIARGS['diff']: cmd += ' -D' + if context.CLIARGS['flush_cache']: + cmd += ' --flush-cache' + os.chdir(context.CLIARGS['dest']) # redo inventory options as new files might exist now diff --git a/lib/ansible/cli/scripts/ansible_connection_cli_stub.py b/lib/ansible/cli/scripts/ansible_connection_cli_stub.py index 701dcdaa198..0c8baa9871f 100644 --- a/lib/ansible/cli/scripts/ansible_connection_cli_stub.py +++ b/lib/ansible/cli/scripts/ansible_connection_cli_stub.py @@ -57,10 +57,10 @@ def file_lock(lock_path): class ConnectionProcess(object): - ''' + """ The connection process wraps around a Connection object that manages the connection to a remote device that persists over the playbook - ''' + """ def __init__(self, fd, play_context, socket_path, original_path, task_uuid=None, ansible_playbook_pid=None): self.play_context = play_context self.socket_path = socket_path diff --git a/lib/ansible/cli/vault.py b/lib/ansible/cli/vault.py index 86902a695fd..8b6dc88a3de 100755 --- a/lib/ansible/cli/vault.py +++ b/lib/ansible/cli/vault.py @@ -25,7 +25,7 @@ display = Display() class VaultCLI(CLI): - ''' can encrypt any structured data file used by Ansible. + """ can encrypt any structured data file used by Ansible. This can include *group_vars/* or *host_vars/* inventory variables, variables loaded by *include_vars* or *vars_files*, or variable files passed on the ansible-playbook command line with *-e @file.yml* or *-e @file.json*. @@ -33,7 +33,7 @@ class VaultCLI(CLI): Because Ansible tasks, handlers, and other objects are data, these can also be encrypted with vault. If you'd like to not expose what variables you are using, you can keep an individual task file entirely encrypted. 
- ''' + """ name = 'ansible-vault' @@ -252,7 +252,7 @@ class VaultCLI(CLI): os.umask(old_umask) def execute_encrypt(self): - ''' encrypt the supplied file using the provided vault secret ''' + """ encrypt the supplied file using the provided vault secret """ if not context.CLIARGS['args'] and sys.stdin.isatty(): display.display("Reading plaintext input from stdin", stderr=True) @@ -286,7 +286,7 @@ class VaultCLI(CLI): return yaml_ciphertext def execute_encrypt_string(self): - ''' encrypt the supplied string using the provided vault secret ''' + """ encrypt the supplied string using the provided vault secret """ b_plaintext = None # Holds tuples (the_text, the_source_of_the_string, the variable name if its provided). @@ -431,7 +431,7 @@ class VaultCLI(CLI): return output def execute_decrypt(self): - ''' decrypt the supplied file using the provided vault secret ''' + """ decrypt the supplied file using the provided vault secret """ if not context.CLIARGS['args'] and sys.stdin.isatty(): display.display("Reading ciphertext input from stdin", stderr=True) @@ -443,7 +443,7 @@ class VaultCLI(CLI): display.display("Decryption successful", stderr=True) def execute_create(self): - ''' create and open a file in an editor that will be encrypted with the provided vault secret when closed''' + """ create and open a file in an editor that will be encrypted with the provided vault secret when closed""" if len(context.CLIARGS['args']) != 1: raise AnsibleOptionsError("ansible-vault create can take only one filename argument") @@ -455,12 +455,12 @@ class VaultCLI(CLI): raise AnsibleOptionsError("not a tty, editor cannot be opened") def execute_edit(self): - ''' open and decrypt an existing vaulted file in an editor, that will be encrypted again when closed''' + """ open and decrypt an existing vaulted file in an editor, that will be encrypted again when closed""" for f in context.CLIARGS['args']: self.editor.edit_file(f) def execute_view(self): - ''' open, decrypt and view an 
existing vaulted file using a pager using the supplied vault secret ''' + """ open, decrypt and view an existing vaulted file using a pager using the supplied vault secret """ for f in context.CLIARGS['args']: # Note: vault should return byte strings because it could encrypt @@ -472,7 +472,7 @@ class VaultCLI(CLI): self.pager(to_text(plaintext)) def execute_rekey(self): - ''' re-encrypt a vaulted file with a new secret, the previous secret is required ''' + """ re-encrypt a vaulted file with a new secret, the previous secret is required """ for f in context.CLIARGS['args']: # FIXME: plumb in vault_id, use the default new_vault_secret for now self.editor.rekey_file(f, self.new_encrypt_secret, diff --git a/lib/ansible/compat/__init__.py b/lib/ansible/compat/__init__.py index 9977603e9d0..3f39dd4c6a0 100644 --- a/lib/ansible/compat/__init__.py +++ b/lib/ansible/compat/__init__.py @@ -15,9 +15,9 @@ # You should have received a copy of the GNU General Public License # along with Ansible. If not, see . -''' +""" Compat library for ansible. This contains compatibility definitions for older python When we need to import a module differently depending on python version, do it here. Then in the code we can simply import from compat in order to get what we want. -''' +""" from __future__ import annotations diff --git a/lib/ansible/compat/selectors.py b/lib/ansible/compat/selectors.py deleted file mode 100644 index 0117f362ad7..00000000000 --- a/lib/ansible/compat/selectors.py +++ /dev/null @@ -1,32 +0,0 @@ -# (c) 2014, 2017 Toshio Kuratomi -# -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. 
-# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. If not, see . - -from __future__ import annotations - -import sys -import selectors - -from ansible.module_utils.common.warnings import deprecate - - -sys.modules['ansible.compat.selectors'] = selectors - - -deprecate( - msg='The `ansible.module_utils.compat.selectors` module is deprecated.', - version='2.19', -) diff --git a/lib/ansible/config/base.yml b/lib/ansible/config/base.yml index 5c1f36225eb..24f9464d0a3 100644 --- a/lib/ansible/config/base.yml +++ b/lib/ansible/config/base.yml @@ -212,18 +212,9 @@ COLLECTIONS_PATHS: default: '{{ ANSIBLE_HOME ~ "/collections:/usr/share/ansible/collections" }}' type: pathspec env: - - name: ANSIBLE_COLLECTIONS_PATHS - deprecated: - why: does not fit var naming standard, use the singular form ANSIBLE_COLLECTIONS_PATH instead - version: "2.19" - name: ANSIBLE_COLLECTIONS_PATH version_added: '2.10' ini: - - key: collections_paths - section: defaults - deprecated: - why: does not fit var naming standard, use the singular form collections_path instead - version: "2.19" - key: collections_path section: defaults version_added: '2.10' @@ -304,6 +295,14 @@ COLOR_HIGHLIGHT: env: [{name: ANSIBLE_COLOR_HIGHLIGHT}] ini: - {key: highlight, section: colors} +COLOR_INCLUDED: + name: Color for 'included' task status + default: cyan + description: Defines the color to use when showing 'Included' task status. 
+ env: [{name: ANSIBLE_COLOR_INCLUDED}] + ini: + - {key: included, section: colors} + version_added: '2.18' COLOR_OK: name: Color for 'ok' task status default: green @@ -783,7 +782,6 @@ DEFAULT_KEEP_REMOTE_FILES: - {key: keep_remote_files, section: defaults} type: boolean DEFAULT_LIBVIRT_LXC_NOSECLABEL: - # TODO: move to plugin name: No security label on Lxc default: False description: @@ -795,6 +793,10 @@ DEFAULT_LIBVIRT_LXC_NOSECLABEL: - {key: libvirt_lxc_noseclabel, section: selinux} type: boolean version_added: "2.1" + deprecated: + why: This option was moved to the plugin itself + version: "2.22" + alternatives: Use the option from the plugin itself. DEFAULT_LOAD_CALLBACK_PLUGINS: name: Load callbacks for adhoc default: False @@ -818,7 +820,9 @@ DEFAULT_LOCAL_TMP: DEFAULT_LOG_PATH: name: Ansible log file path default: ~ - description: File to which Ansible will log on the controller. When empty logging is disabled. + description: + - File to which Ansible will log on the controller. + - When not set the logging is disabled. env: [{name: ANSIBLE_LOG_PATH}] ini: - {key: log_path, section: defaults} @@ -863,8 +867,8 @@ DEFAULT_MODULE_COMPRESSION: env: [] ini: - {key: module_compression, section: defaults} -# vars: -# - name: ansible_module_compression + vars: + - name: ansible_module_compression DEFAULT_MODULE_NAME: name: Default adhoc module default: command @@ -1016,7 +1020,7 @@ DEFAULT_STDOUT_CALLBACK: EDITOR: name: editor application to use default: vi - descrioption: + description: - for the cases in which Ansible needs to return a file within an editor, this chooses the application to use. ini: - section: defaults @@ -1518,6 +1522,23 @@ GALAXY_REQUIRED_VALID_SIGNATURE_COUNT: - The number of signatures that must be successful during GPG signature verification while installing or verifying collections. - This should be a positive integer or all to indicate all signatures must successfully validate the collection. 
- Prepend + to the value to fail if no valid signatures are found for the collection. +GALAXY_COLLECTION_IMPORT_POLL_INTERVAL: + description: + - The initial interval in seconds for polling the import status of a collection. + - This interval increases exponentially based on the :ref:`galaxy_collection_import_poll_factor`, with a maximum delay of 30 seconds. + type: float + default: 2.0 + env: + - name: ANSIBLE_GALAXY_COLLECTION_IMPORT_POLL_INTERVAL + version_added: '2.18' +GALAXY_COLLECTION_IMPORT_POLL_FACTOR: + description: + - The multiplier used to increase the :ref:`galaxy_collection_import_poll_interval` when checking the collection import status. + type: float + default: 1.5 + env: + - name: ANSIBLE_GALAXY_COLLECTION_IMPORT_POLL_FACTOR + version_added: "2.18" HOST_KEY_CHECKING: # NOTE: constant not in use by ssh/paramiko plugins anymore, but they do support the same configuration sources # TODO: check non ssh connection plugins for use/migration @@ -1620,60 +1641,6 @@ INVENTORY_ANY_UNPARSED_IS_FAILED: ini: - {key: any_unparsed_is_failed, section: inventory} version_added: "2.7" -INVENTORY_CACHE_ENABLED: - name: Inventory caching enabled - default: False - description: - - Toggle to turn on inventory caching. - - This setting has been moved to the individual inventory plugins as a plugin option :ref:`inventory_plugins`. - - The existing configuration settings are still accepted with the inventory plugin adding additional options from inventory configuration. - - This message will be removed in 2.16. - env: [{name: ANSIBLE_INVENTORY_CACHE}] - ini: - - {key: cache, section: inventory} - type: bool -INVENTORY_CACHE_PLUGIN: - name: Inventory cache plugin - description: - - The plugin for caching inventory. - - This setting has been moved to the individual inventory plugins as a plugin option :ref:`inventory_plugins`. 
- - The existing configuration settings are still accepted with the inventory plugin adding additional options from inventory and fact cache configuration. - - This message will be removed in 2.16. - env: [{name: ANSIBLE_INVENTORY_CACHE_PLUGIN}] - ini: - - {key: cache_plugin, section: inventory} -INVENTORY_CACHE_PLUGIN_CONNECTION: - name: Inventory cache plugin URI to override the defaults section - description: - - The inventory cache connection. - - This setting has been moved to the individual inventory plugins as a plugin option :ref:`inventory_plugins`. - - The existing configuration settings are still accepted with the inventory plugin adding additional options from inventory and fact cache configuration. - - This message will be removed in 2.16. - env: [{name: ANSIBLE_INVENTORY_CACHE_CONNECTION}] - ini: - - {key: cache_connection, section: inventory} -INVENTORY_CACHE_PLUGIN_PREFIX: - name: Inventory cache plugin table prefix - description: - - The table prefix for the cache plugin. - - This setting has been moved to the individual inventory plugins as a plugin option :ref:`inventory_plugins`. - - The existing configuration settings are still accepted with the inventory plugin adding additional options from inventory and fact cache configuration. - - This message will be removed in 2.16. - env: [{name: ANSIBLE_INVENTORY_CACHE_PLUGIN_PREFIX}] - default: ansible_inventory_ - ini: - - {key: cache_prefix, section: inventory} -INVENTORY_CACHE_TIMEOUT: - name: Inventory cache plugin expiration timeout - description: - - Expiration timeout for the inventory cache plugin data. - - This setting has been moved to the individual inventory plugins as a plugin option :ref:`inventory_plugins`. - - The existing configuration settings are still accepted with the inventory plugin adding additional options from inventory and fact cache configuration. - - This message will be removed in 2.16. 
- default: 3600 - env: [{name: ANSIBLE_INVENTORY_CACHE_TIMEOUT}] - ini: - - {key: cache_timeout, section: inventory} INVENTORY_ENABLED: name: Active Inventory plugins default: ['host_list', 'script', 'auto', 'yaml', 'ini', 'toml'] @@ -1692,7 +1659,7 @@ INVENTORY_EXPORT: type: bool INVENTORY_IGNORE_EXTS: name: Inventory ignore extensions - default: "{{(REJECT_EXTS + ('.orig', '.ini', '.cfg', '.retry'))}}" + default: "{{(REJECT_EXTS + ('.orig', '.cfg', '.retry'))}}" description: List of extensions to ignore when using a directory as an inventory source. env: [{name: ANSIBLE_INVENTORY_IGNORE}] ini: @@ -1779,7 +1746,7 @@ OLD_PLUGIN_CACHE_CLEARING: PAGER: name: pager application to use default: less - descrioption: + description: - for the cases in which Ansible needs to return output in a pageable fashion, this chooses the application to use. ini: - section: defaults @@ -2075,25 +2042,6 @@ NETCONF_SSH_CONFIG: - {key: ssh_config, section: netconf_connection} yaml: {key: netconf_connection.ssh_config} default: null -STRING_CONVERSION_ACTION: - version_added: '2.8' - description: - - Action to take when a module parameter value is converted to a string (this does not affect variables). - For string parameters, values such as '1.00', "['a', 'b',]", and 'yes', 'y', etc. - will be converted by the YAML parser unless fully quoted. - - Valid options are 'error', 'warn', and 'ignore'. - - Since 2.8, this option defaults to 'warn' but will change to 'error' in 2.12. - default: 'warn' - env: - - name: ANSIBLE_STRING_CONVERSION_ACTION - ini: - - section: defaults - key: string_conversion_action - type: string - deprecated: - why: This option is no longer used in the Ansible Core code base. - version: "2.19" - alternatives: There is no alternative at the moment. A different mechanism would have to be implemented in the current code base. 
VALIDATE_ACTION_GROUP_METADATA: version_added: '2.12' description: @@ -2116,4 +2064,35 @@ VERBOSE_TO_STDERR: - section: defaults key: verbose_to_stderr type: bool -... +_Z_TEST_ENTRY: + name: testentry + description: for tests + env: + - name: ANSIBLE_TEST_ENTRY + - name: ANSIBLE_TEST_ENTRY_D + deprecated: + why: for testing + version: '3.30' + alternatives: nothing + ini: + - section: testing + key: valid + - section: testing + key: deprecated + deprecated: + why: for testing + version: '3.30' + alternatives: nothing +_Z_TEST_ENTRY_2: + version_added: '2.18' + name: testentry + description: for tests + deprecated: + why: for testing + version: '3.30' + alternatives: nothing + env: + - name: ANSIBLE_TEST_ENTRY2 + ini: + - section: testing + key: valid2 diff --git a/lib/ansible/config/manager.py b/lib/ansible/config/manager.py index cd674cfb32c..818219b1304 100644 --- a/lib/ansible/config/manager.py +++ b/lib/ansible/config/manager.py @@ -4,6 +4,7 @@ from __future__ import annotations import atexit +import decimal import configparser import os import os.path @@ -16,6 +17,7 @@ from collections.abc import Mapping, Sequence from jinja2.nativetypes import NativeEnvironment from ansible.errors import AnsibleOptionsError, AnsibleError, AnsibleRequiredOptionError +from ansible.module_utils.common.sentinel import Sentinel from ansible.module_utils.common.text.converters import to_text, to_bytes, to_native from ansible.module_utils.common.yaml import yaml_load from ansible.module_utils.six import string_types @@ -51,7 +53,7 @@ GALAXY_SERVER_ADDITIONAL = { def _get_entry(plugin_type, plugin_name, config): - ''' construct entry for requested config ''' + """ construct entry for requested config """ entry = '' if plugin_type: entry += 'plugin_type: %s ' % plugin_type @@ -63,7 +65,7 @@ def _get_entry(plugin_type, plugin_name, config): # FIXME: see if we can unify in module_utils with similar function used by argspec def ensure_type(value, value_type, origin=None, 
origin_ftype=None): - ''' return a configuration variable with casting + """ return a configuration variable with casting :arg value: The value to ensure correct typing of :kwarg value_type: The type of the value. This can be any of the following strings: :boolean: sets the value to a True or False value @@ -86,7 +88,7 @@ def ensure_type(value, value_type, origin=None, origin_ftype=None): tildes's in the value. :str: Sets the value to string types. :string: Same as 'str' - ''' + """ errmsg = '' basedir = None @@ -101,10 +103,18 @@ def ensure_type(value, value_type, origin=None, origin_ftype=None): value = boolean(value, strict=False) elif value_type in ('integer', 'int'): - value = int(value) + if not isinstance(value, int): + try: + if (decimal_value := decimal.Decimal(value)) == (int_part := int(decimal_value)): + value = int_part + else: + errmsg = 'int' + except decimal.DecimalException as e: + raise ValueError from e elif value_type == 'float': - value = float(value) + if not isinstance(value, float): + value = float(value) elif value_type == 'list': if isinstance(value, string_types): @@ -173,14 +183,14 @@ def ensure_type(value, value_type, origin=None, origin_ftype=None): value = unquote(value) if errmsg: - raise ValueError('Invalid type provided for "%s": %s' % (errmsg, to_native(value))) + raise ValueError(f'Invalid type provided for "{errmsg}": {value!r}') return to_text(value, errors='surrogate_or_strict', nonstring='passthru') # FIXME: see if this can live in utils/path def resolve_path(path, basedir=None): - ''' resolve relative or 'variable' paths ''' + """ resolve relative or 'variable' paths """ if '{{CWD}}' in path: # allow users to force CWD using 'magic' {{CWD}} path = path.replace('{{CWD}}', os.getcwd()) @@ -205,7 +215,7 @@ def get_config_type(cfile): # FIXME: can move to module_utils for use for ini plugins also? 
def get_ini_config_value(p, entry): - ''' returns the value of last ini entry found ''' + """ returns the value of last ini entry found """ value = None if p is not None: try: @@ -216,22 +226,20 @@ def get_ini_config_value(p, entry): def find_ini_config_file(warnings=None): - ''' Load INI Config File order(first found is used): ENV, CWD, HOME, /etc/ansible ''' + """ Load INI Config File order(first found is used): ENV, CWD, HOME, /etc/ansible """ # FIXME: eventually deprecate ini configs if warnings is None: # Note: In this case, warnings does nothing warnings = set() - # A value that can never be a valid path so that we can tell if ANSIBLE_CONFIG was set later - # We can't use None because we could set path to None. - SENTINEL = object - potential_paths = [] + # A value that can never be a valid path so that we can tell if ANSIBLE_CONFIG was set later + # We can't use None because we could set path to None. # Environment setting - path_from_env = os.getenv("ANSIBLE_CONFIG", SENTINEL) - if path_from_env is not SENTINEL: + path_from_env = os.getenv("ANSIBLE_CONFIG", Sentinel) + if path_from_env is not Sentinel: path_from_env = unfrackpath(path_from_env, follow=False) if os.path.isdir(to_bytes(path_from_env)): path_from_env = os.path.join(path_from_env, "ansible.cfg") @@ -281,7 +289,7 @@ def find_ini_config_file(warnings=None): def _add_base_defs_deprecations(base_defs): - '''Add deprecation source 'ansible.builtin' to deprecations in base.yml''' + """Add deprecation source 'ansible.builtin' to deprecations in base.yml""" def process(entry): if 'deprecated' in entry: entry['deprecated']['collection_name'] = 'ansible.builtin' @@ -380,7 +388,7 @@ class ConfigManager(object): "Missing base YAML definition file (bad install?): %s" % to_native(yml_file)) def _parse_config_file(self, cfile=None): - ''' return flat configuration settings from file(s) ''' + """ return flat configuration settings from file(s) """ # TODO: take list of files with merge/nomerge if cfile is None: 
@@ -407,7 +415,7 @@ class ConfigManager(object): raise AnsibleOptionsError("Unsupported configuration file type: %s" % to_native(ftype)) def _find_yaml_config_files(self): - ''' Load YAML Config Files in order, check merge flags, keep origin of settings''' + """ Load YAML Config Files in order, check merge flags, keep origin of settings""" pass def get_plugin_options(self, plugin_type, name, keys=None, variables=None, direct=None): @@ -459,7 +467,7 @@ class ConfigManager(object): return has def get_configuration_definitions(self, plugin_type=None, name=None, ignore_private=False): - ''' just list the possible settings, either base or for specific plugins or plugin ''' + """ just list the possible settings, either base or for specific plugins or plugin """ ret = {} if plugin_type is None: @@ -476,7 +484,7 @@ class ConfigManager(object): return ret def _loop_entries(self, container, entry_list): - ''' repeat code for value entry assignment ''' + """ repeat code for value entry assignment """ value = None origin = None @@ -502,7 +510,7 @@ class ConfigManager(object): return value, origin def get_config_value(self, config, cfile=None, plugin_type=None, plugin_name=None, keys=None, variables=None, direct=None): - ''' wrapper ''' + """ wrapper """ try: value, _drop = self.get_config_value_and_origin(config, cfile=cfile, plugin_type=plugin_type, plugin_name=plugin_name, @@ -514,7 +522,7 @@ class ConfigManager(object): return value def get_config_value_and_origin(self, config, cfile=None, plugin_type=None, plugin_name=None, keys=None, variables=None, direct=None): - ''' Given a config key figure out the actual value and report on the origin of the settings ''' + """ Given a config key figure out the actual value and report on the origin of the settings """ if cfile is None: # use default config cfile = self._config_file @@ -672,3 +680,19 @@ class ConfigManager(object): self._plugins[plugin_type] = {} self._plugins[plugin_type][name] = defs + + @staticmethod + def 
get_deprecated_msg_from_config(dep_docs, include_removal=False, collection_name=None): + + removal = '' + if include_removal: + if 'removed_at_date' in dep_docs: + removal = f"Will be removed in a release after {dep_docs['removed_at_date']}\n\t" + elif collection_name: + removal = f"Will be removed in: {collection_name} {dep_docs['removed_in']}\n\t" + else: + removal = f"Will be removed in: Ansible {dep_docs['removed_in']}\n\t" + + # TODO: choose to deprecate either singular or plural + alt = dep_docs.get('alternatives', dep_docs.get('alternative', 'none')) + return f"Reason: {dep_docs['why']}\n\t{removal}Alternatives: {alt}" diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py index 42b1b1c7bd7..af60053a3dd 100644 --- a/lib/ansible/constants.py +++ b/lib/ansible/constants.py @@ -15,9 +15,13 @@ from ansible.module_utils.parsing.convert_bool import BOOLEANS_TRUE from ansible.release import __version__ from ansible.utils.fqcn import add_internal_fqcns +# initialize config manager/config data to read/store global settings +# and generate 'pseudo constants' for app consumption. 
+config = ConfigManager() + def _warning(msg): - ''' display is not guaranteed here, nor it being the full class, but try anyways, fallback to sys.stderr.write ''' + """ display is not guaranteed here, nor it being the full class, but try anyways, fallback to sys.stderr.write """ try: from ansible.utils.display import Display Display().warning(msg) @@ -27,7 +31,7 @@ def _warning(msg): def _deprecated(msg, version): - ''' display is not guaranteed here, nor it being the full class, but try anyways, fallback to sys.stderr.write ''' + """ display is not guaranteed here, nor it being the full class, but try anyways, fallback to sys.stderr.write """ try: from ansible.utils.display import Display Display().deprecated(msg, version=version) @@ -36,8 +40,30 @@ def _deprecated(msg, version): sys.stderr.write(' [DEPRECATED] %s, to be removed in %s\n' % (msg, version)) +def handle_config_noise(display=None): + + if display is not None: + w = display.warning + d = display.deprecated + else: + w = _warning + d = _deprecated + + while config.WARNINGS: + warn = config.WARNINGS.pop() + w(warn) + + while config.DEPRECATED: + # tuple with name and options + dep = config.DEPRECATED.pop(0) + msg = config.get_deprecated_msg_from_config(dep[1]) + # use tabs only for ansible-doc? + msg = msg.replace("\t", "") + d(f"{dep[0]} option. 
{msg}", version=dep[1]['version']) + + def set_constant(name, value, export=vars()): - ''' sets constants and returns resolved options dict ''' + """ sets constants and returns resolved options dict """ export[name] = value @@ -152,10 +178,10 @@ INTERNAL_STATIC_VARS = frozenset( ] ) LOCALHOST = ('127.0.0.1', 'localhost', '::1') -MODULE_REQUIRE_ARGS = tuple(add_internal_fqcns(('command', 'win_command', 'ansible.windows.win_command', 'shell', 'win_shell', - 'ansible.windows.win_shell', 'raw', 'script'))) -MODULE_NO_JSON = tuple(add_internal_fqcns(('command', 'win_command', 'ansible.windows.win_command', 'shell', 'win_shell', - 'ansible.windows.win_shell', 'raw'))) +WIN_MOVED = ['ansible.windows.win_command', 'ansible.windows.win_shell'] +MODULE_REQUIRE_ARGS_SIMPLE = ['command', 'raw', 'script', 'shell', 'win_command', 'win_shell'] +MODULE_REQUIRE_ARGS = tuple(add_internal_fqcns(MODULE_REQUIRE_ARGS_SIMPLE) + WIN_MOVED) +MODULE_NO_JSON = tuple(add_internal_fqcns(('command', 'win_command', 'shell', 'win_shell', 'raw')) + WIN_MOVED) RESTRICTED_RESULT_KEYS = ('ansible_rsync_path', 'ansible_playbook_python', 'ansible_facts') SYNTHETIC_COLLECTIONS = ('ansible.builtin', 'ansible.legacy') TREE_DIR = None @@ -218,11 +244,8 @@ MAGIC_VARIABLE_MAPPING = dict( ) # POPULATE SETTINGS FROM CONFIG ### -config = ConfigManager() - -# Generate constants from config for setting in config.get_configuration_definitions(): set_constant(setting, config.get_config_value(setting, variables=vars())) -for warn in config.WARNINGS: - _warning(warn) +# emit any warnings or deprecations +handle_config_noise() diff --git a/lib/ansible/errors/__init__.py b/lib/ansible/errors/__init__.py index f003b589c8a..31ee4bdf1da 100644 --- a/lib/ansible/errors/__init__.py +++ b/lib/ansible/errors/__init__.py @@ -36,7 +36,7 @@ from ansible.module_utils.common.text.converters import to_native, to_text class AnsibleError(Exception): - ''' + """ This is the base class for all errors raised from Ansible code, and can 
be instantiated with two optional parameters beyond the error message to control whether detailed information is displayed @@ -48,7 +48,7 @@ class AnsibleError(Exception): Where "obj" is some subclass of ansible.parsing.yaml.objects.AnsibleBaseYAMLObject, which should be returned by the DataLoader() class. - ''' + """ def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None): super(AnsibleError, self).__init__(message) @@ -66,14 +66,18 @@ class AnsibleError(Exception): from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject message = [self._message] + + # Add from previous exceptions + if self.orig_exc: + message.append('. %s' % to_native(self.orig_exc)) + + # Add from yaml to give specific file/line no if isinstance(self.obj, AnsibleBaseYAMLObject): extended_error = self._get_extended_error() if extended_error and not self._suppress_extended_error: message.append( '\n\n%s' % to_native(extended_error) ) - elif self.orig_exc: - message.append('. %s' % to_native(self.orig_exc)) return ''.join(message) @@ -88,11 +92,11 @@ class AnsibleError(Exception): return self.message def _get_error_lines_from_file(self, file_name, line_number): - ''' + """ Returns the line in the file which corresponds to the reported error location, as well as the line preceding it (if the error did not occur on the first line), to provide context to the error. - ''' + """ target_line = '' prev_line = '' @@ -121,7 +125,7 @@ class AnsibleError(Exception): return (target_line, prev_line) def _get_extended_error(self): - ''' + """ Given an object reporting the location of the exception in a file, return detailed information regarding it including: @@ -130,7 +134,7 @@ class AnsibleError(Exception): If this error was created with show_content=False, the reporting of content is suppressed, as the file contents may be sensitive (ie. vault data). 
- ''' + """ error_message = '' @@ -210,85 +214,85 @@ class AnsibleError(Exception): class AnsiblePromptInterrupt(AnsibleError): - '''User interrupt''' + """User interrupt""" class AnsiblePromptNoninteractive(AnsibleError): - '''Unable to get user input''' + """Unable to get user input""" class AnsibleAssertionError(AnsibleError, AssertionError): - '''Invalid assertion''' + """Invalid assertion""" pass class AnsibleOptionsError(AnsibleError): - ''' bad or incomplete options passed ''' + """ bad or incomplete options passed """ pass class AnsibleRequiredOptionError(AnsibleOptionsError): - ''' bad or incomplete options passed ''' + """ bad or incomplete options passed """ pass class AnsibleParserError(AnsibleError): - ''' something was detected early that is wrong about a playbook or data file ''' + """ something was detected early that is wrong about a playbook or data file """ pass class AnsibleInternalError(AnsibleError): - ''' internal safeguards tripped, something happened in the code that should never happen ''' + """ internal safeguards tripped, something happened in the code that should never happen """ pass class AnsibleRuntimeError(AnsibleError): - ''' ansible had a problem while running a playbook ''' + """ ansible had a problem while running a playbook """ pass class AnsibleModuleError(AnsibleRuntimeError): - ''' a module failed somehow ''' + """ a module failed somehow """ pass class AnsibleConnectionFailure(AnsibleRuntimeError): - ''' the transport / connection_plugin had a fatal error ''' + """ the transport / connection_plugin had a fatal error """ pass class AnsibleAuthenticationFailure(AnsibleConnectionFailure): - '''invalid username/password/key''' + """invalid username/password/key""" pass class AnsibleCallbackError(AnsibleRuntimeError): - ''' a callback failure ''' + """ a callback failure """ pass class AnsibleTemplateError(AnsibleRuntimeError): - '''A template related error''' + """A template related error""" pass class 
AnsibleFilterError(AnsibleTemplateError): - ''' a templating failure ''' + """ a templating failure """ pass class AnsibleLookupError(AnsibleTemplateError): - ''' a lookup failure ''' + """ a lookup failure """ pass class AnsibleUndefinedVariable(AnsibleTemplateError): - ''' a templating failure ''' + """ a templating failure """ pass class AnsibleFileNotFound(AnsibleRuntimeError): - ''' a file missing failure ''' + """ a file missing failure """ def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, paths=None, file_name=None): @@ -318,7 +322,7 @@ class AnsibleFileNotFound(AnsibleRuntimeError): # DO NOT USE as they will probably be removed soon. # We will port the action modules in our tree to use a context manager instead. class AnsibleAction(AnsibleRuntimeError): - ''' Base Exception for Action plugin flow control ''' + """ Base Exception for Action plugin flow control """ def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, result=None): @@ -331,7 +335,7 @@ class AnsibleAction(AnsibleRuntimeError): class AnsibleActionSkip(AnsibleAction): - ''' an action runtime skip''' + """ an action runtime skip""" def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, result=None): super(AnsibleActionSkip, self).__init__(message=message, obj=obj, show_content=show_content, @@ -340,7 +344,7 @@ class AnsibleActionSkip(AnsibleAction): class AnsibleActionFail(AnsibleAction): - ''' an action runtime failure''' + """ an action runtime failure""" def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, result=None): super(AnsibleActionFail, self).__init__(message=message, obj=obj, show_content=show_content, suppress_extended_error=suppress_extended_error, orig_exc=orig_exc, result=result) @@ -348,37 +352,37 @@ class AnsibleActionFail(AnsibleAction): class 
_AnsibleActionDone(AnsibleAction): - ''' an action runtime early exit''' + """ an action runtime early exit""" pass class AnsiblePluginError(AnsibleError): - ''' base class for Ansible plugin-related errors that do not need AnsibleError contextual data ''' + """ base class for Ansible plugin-related errors that do not need AnsibleError contextual data """ def __init__(self, message=None, plugin_load_context=None): super(AnsiblePluginError, self).__init__(message) self.plugin_load_context = plugin_load_context class AnsiblePluginRemovedError(AnsiblePluginError): - ''' a requested plugin has been removed ''' + """ a requested plugin has been removed """ pass class AnsiblePluginCircularRedirect(AnsiblePluginError): - '''a cycle was detected in plugin redirection''' + """a cycle was detected in plugin redirection""" pass class AnsibleCollectionUnsupportedVersionError(AnsiblePluginError): - '''a collection is not supported by this version of Ansible''' + """a collection is not supported by this version of Ansible""" pass class AnsibleFilterTypeError(AnsibleTemplateError, TypeError): - ''' a Jinja filter templating failure due to bad type''' + """ a Jinja filter templating failure due to bad type""" pass class AnsiblePluginNotFound(AnsiblePluginError): - ''' Indicates we did not find an Ansible plugin ''' + """ Indicates we did not find an Ansible plugin """ pass diff --git a/lib/ansible/executor/interpreter_discovery.py b/lib/ansible/executor/interpreter_discovery.py index 6d105817b03..24b2174d3c8 100644 --- a/lib/ansible/executor/interpreter_discovery.py +++ b/lib/ansible/executor/interpreter_discovery.py @@ -41,7 +41,7 @@ class InterpreterDiscoveryRequiredError(Exception): def discover_interpreter(action, interpreter_name, discovery_mode, task_vars): # interpreter discovery is a 2-step process with the target. First, we use a simple shell-agnostic bootstrap to # get the system type from uname, and find any random Python that can get us the info we need. 
For supported - # target OS types, we'll dispatch a Python script that calls plaform.dist() (for older platforms, where available) + # target OS types, we'll dispatch a Python script that calls platform.dist() (for older platforms, where available) # and brings back /etc/os-release (if present). The proper Python path is looked up in a table of known # distros/versions with included Pythons; if nothing is found, depending on the discovery mode, either the # default fallback of /usr/bin/python is used (if we know it's there), or discovery fails. diff --git a/lib/ansible/executor/module_common.py b/lib/ansible/executor/module_common.py index 717a39833ce..d4c2eab600f 100644 --- a/lib/ansible/executor/module_common.py +++ b/lib/ansible/executor/module_common.py @@ -74,7 +74,7 @@ _MODULE_UTILS_PATH = os.path.join(os.path.dirname(__file__), '..', 'module_utils # ****************************************************************************** -ANSIBALLZ_TEMPLATE = u'''%(shebang)s +ANSIBALLZ_TEMPLATE = u"""%(shebang)s %(coding)s _ANSIBALLZ_WRAPPER = True # For test-module.py script to tell this is a ANSIBALLZ_WRAPPER # This code is part of Ansible, but is an independent component. 
@@ -333,9 +333,9 @@ def _ansiballz_main(): if __name__ == '__main__': _ansiballz_main() -''' +""" -ANSIBALLZ_COVERAGE_TEMPLATE = ''' +ANSIBALLZ_COVERAGE_TEMPLATE = """ os.environ['COVERAGE_FILE'] = %(coverage_output)r + '=python-%%s=coverage' %% '.'.join(str(v) for v in sys.version_info[:2]) import atexit @@ -355,9 +355,9 @@ ANSIBALLZ_COVERAGE_TEMPLATE = ''' atexit.register(atexit_coverage) cov.start() -''' +""" -ANSIBALLZ_COVERAGE_CHECK_TEMPLATE = ''' +ANSIBALLZ_COVERAGE_CHECK_TEMPLATE = """ try: if PY3: import importlib.util @@ -369,9 +369,9 @@ ANSIBALLZ_COVERAGE_CHECK_TEMPLATE = ''' except ImportError: print('{"msg": "Could not find `coverage` module.", "failed": true}') sys.exit(1) -''' +""" -ANSIBALLZ_RLIMIT_TEMPLATE = ''' +ANSIBALLZ_RLIMIT_TEMPLATE = """ import resource existing_soft, existing_hard = resource.getrlimit(resource.RLIMIT_NOFILE) @@ -385,7 +385,7 @@ ANSIBALLZ_RLIMIT_TEMPLATE = ''' except ValueError: # some platforms (eg macOS) lie about their hard limit pass -''' +""" def _strip_comments(source): diff --git a/lib/ansible/executor/play_iterator.py b/lib/ansible/executor/play_iterator.py index 474b5da94f4..e512b64b840 100644 --- a/lib/ansible/executor/play_iterator.py +++ b/lib/ansible/executor/play_iterator.py @@ -440,14 +440,38 @@ class PlayIterator: else: state.cur_handlers_task += 1 if task.is_host_notified(host): - break + return state, task elif state.run_state == IteratingStates.COMPLETE: return (state, None) # if something above set the task, break out of the loop now if task: - break + # skip implicit flush_handlers if there are no handlers notified + if ( + task.implicit + and task.action in C._ACTION_META + and task.args.get('_raw_params', None) == 'flush_handlers' + and ( + # the state store in the `state` variable could be a nested state, + # notifications are always stored in the top level state, get it here + not self.get_state_for_host(host.name).handler_notifications + # in case handlers notifying other handlers, the notifications 
are not + # saved in `handler_notifications` and handlers are notified directly + # to prevent duplicate handler runs, so check whether any handler + # is notified + and all(not h.notified_hosts for h in self.handlers) + ) + ): + display.debug("No handler notifications for %s, skipping." % host.name) + elif ( + (role := task._role) + and role._metadata.allow_duplicates is False + and host.name in self._play._get_cached_role(role)._completed + ): + display.debug("'%s' skipped because role has already run" % task) + else: + break return (state, task) @@ -539,9 +563,9 @@ class PlayIterator: self._clear_state_errors(state.always_child_state) def get_active_state(self, state): - ''' + """ Finds the active state, recursively if necessary when there are child states. - ''' + """ if state.run_state == IteratingStates.TASKS and state.tasks_child_state is not None: return self.get_active_state(state.tasks_child_state) elif state.run_state == IteratingStates.RESCUE and state.rescue_child_state is not None: @@ -551,10 +575,10 @@ class PlayIterator: return state def is_any_block_rescuing(self, state): - ''' + """ Given the current HostState state, determines if the current block, or any child blocks, are in rescue mode. - ''' + """ if state.run_state == IteratingStates.TASKS and state.get_current_block().rescue: return True if state.tasks_child_state is not None: @@ -635,3 +659,19 @@ class PlayIterator: def clear_notification(self, hostname: str, notification: str) -> None: self._host_states[hostname].handler_notifications.remove(notification) + + def end_host(self, hostname: str) -> None: + """Used by ``end_host``, ``end_batch`` and ``end_play`` meta tasks to end executing given host.""" + state = self.get_active_state(self.get_state_for_host(hostname)) + if state.run_state == IteratingStates.RESCUE: + # This is a special case for when ending a host occurs in rescue. 
+ # By definition the meta task responsible for ending the host + # is the last task, so we need to clear the fail state to mark + # the host as rescued. + # The reason we need to do that is because this operation is + # normally done when PlayIterator transitions from rescue to + # always when only then we can say that rescue didn't fail + # but with ending a host via meta task, we don't get to that transition. + self.set_fail_state_for_host(hostname, FailedStates.NONE) + self.set_run_state_for_host(hostname, IteratingStates.COMPLETE) + self._play._removed_hosts.append(hostname) diff --git a/lib/ansible/executor/playbook_executor.py b/lib/ansible/executor/playbook_executor.py index f439967838b..468c4bdc709 100644 --- a/lib/ansible/executor/playbook_executor.py +++ b/lib/ansible/executor/playbook_executor.py @@ -40,10 +40,10 @@ display = Display() class PlaybookExecutor: - ''' + """ This is the primary class for executing playbooks, and thus the basis for bin/ansible-playbook operation. - ''' + """ def __init__(self, playbooks, inventory, variable_manager, loader, passwords): self._playbooks = playbooks @@ -74,10 +74,10 @@ class PlaybookExecutor: set_default_transport() def run(self): - ''' + """ Run the given playbook, based on the settings in the play which may limit the runs to serialized groups, etc. - ''' + """ result = 0 entrylist = [] @@ -267,10 +267,10 @@ class PlaybookExecutor: return result def _get_serialized_batches(self, play): - ''' + """ Returns a list of hosts, subdivided into batches based on the serial size specified in the play. - ''' + """ # make sure we have a unique list of hosts all_hosts = self._inventory.get_hosts(play.hosts, order=play.order) @@ -313,11 +313,11 @@ class PlaybookExecutor: return serialized_batches def _generate_retry_inventory(self, retry_path, replay_hosts): - ''' + """ Called when a playbook run fails. It generates an inventory which allows re-running on ONLY the failed hosts. 
This may duplicate some variable information in group_vars/host_vars but that is ok, and expected. - ''' + """ try: makedirs_safe(os.path.dirname(retry_path)) with open(retry_path, 'w') as fd: diff --git a/lib/ansible/executor/powershell/become_wrapper.ps1 b/lib/ansible/executor/powershell/become_wrapper.ps1 index f40e2658f5f..cea42c128aa 100644 --- a/lib/ansible/executor/powershell/become_wrapper.ps1 +++ b/lib/ansible/executor/powershell/become_wrapper.ps1 @@ -116,12 +116,11 @@ Write-AnsibleLog "INFO - parsed become input, user: '$username', type: '$logon_t # set to Stop and cannot be changed. Also need to split the payload from the wrapper to prevent potentially # sensitive content from being logged by the scriptblock logger. $bootstrap_wrapper = { - &chcp.com 65001 > $null - $exec_wrapper_str = [System.Console]::In.ReadToEnd() - $split_parts = $exec_wrapper_str.Split(@("`0`0`0`0"), 2, [StringSplitOptions]::RemoveEmptyEntries) + [Console]::InputEncoding = [Console]::OutputEncoding = New-Object System.Text.UTF8Encoding + $ew = [System.Console]::In.ReadToEnd() + $split_parts = $ew.Split(@("`0`0`0`0"), 2, [StringSplitOptions]::RemoveEmptyEntries) Set-Variable -Name json_raw -Value $split_parts[1] - $exec_wrapper = [ScriptBlock]::Create($split_parts[0]) - &$exec_wrapper + &([ScriptBlock]::Create($split_parts[0])) } $exec_command = [System.Convert]::ToBase64String([System.Text.Encoding]::Unicode.GetBytes($bootstrap_wrapper.ToString())) $lp_command_line = "powershell.exe -NonInteractive -NoProfile -ExecutionPolicy Bypass -EncodedCommand $exec_command" diff --git a/lib/ansible/executor/powershell/bootstrap_wrapper.ps1 b/lib/ansible/executor/powershell/bootstrap_wrapper.ps1 index cdba80cbb01..8e7141eb515 100644 --- a/lib/ansible/executor/powershell/bootstrap_wrapper.ps1 +++ b/lib/ansible/executor/powershell/bootstrap_wrapper.ps1 @@ -1,4 +1,4 @@ -&chcp.com 65001 > $null +try { [Console]::InputEncoding = [Console]::OutputEncoding = New-Object System.Text.UTF8Encoding } 
catch { $null = $_ } if ($PSVersionTable.PSVersion -lt [Version]"3.0") { '{"failed":true,"msg":"Ansible requires PowerShell v3.0 or newer"}' @@ -9,5 +9,4 @@ $exec_wrapper_str = $input | Out-String $split_parts = $exec_wrapper_str.Split(@("`0`0`0`0"), 2, [StringSplitOptions]::RemoveEmptyEntries) If (-not $split_parts.Length -eq 2) { throw "invalid payload" } Set-Variable -Name json_raw -Value $split_parts[1] -$exec_wrapper = [ScriptBlock]::Create($split_parts[0]) -&$exec_wrapper +& ([ScriptBlock]::Create($split_parts[0])) diff --git a/lib/ansible/executor/powershell/exec_wrapper.ps1 b/lib/ansible/executor/powershell/exec_wrapper.ps1 index cce99abc77f..4ecc1367c84 100644 --- a/lib/ansible/executor/powershell/exec_wrapper.ps1 +++ b/lib/ansible/executor/powershell/exec_wrapper.ps1 @@ -16,7 +16,7 @@ begin { .SYNOPSIS Converts a JSON string to a Hashtable/Array in the fastest way possible. Unfortunately ConvertFrom-Json is still faster but outputs - a PSCustomObject which is combersone for module consumption. + a PSCustomObject which is cumbersome for module consumption. .PARAMETER InputObject [String] The JSON string to deserialize. 
diff --git a/lib/ansible/executor/powershell/module_manifest.py b/lib/ansible/executor/powershell/module_manifest.py index 99b18e5ff4b..da69c9dacb5 100644 --- a/lib/ansible/executor/powershell/module_manifest.py +++ b/lib/ansible/executor/powershell/module_manifest.py @@ -8,7 +8,7 @@ import errno import json import os import pkgutil -import random +import secrets import re from importlib import import_module @@ -254,9 +254,8 @@ def _slurp(path): if not os.path.exists(path): raise AnsibleError("imported module support code does not exist at %s" % os.path.abspath(path)) - fd = open(path, 'rb') - data = fd.read() - fd.close() + with open(path, 'rb') as fd: + data = fd.read() return data @@ -318,7 +317,7 @@ def _create_powershell_wrapper(b_module_data, module_path, module_args, exec_manifest["actions"].insert(0, 'async_watchdog') exec_manifest["actions"].insert(0, 'async_wrapper') - exec_manifest["async_jid"] = f'j{random.randint(0, 999999999999)}' + exec_manifest["async_jid"] = f'j{secrets.randbelow(999999999999)}' exec_manifest["async_timeout_sec"] = async_timeout exec_manifest["async_startup_timeout"] = C.config.get_config_value("WIN_ASYNC_STARTUP_TIMEOUT", variables=task_vars) diff --git a/lib/ansible/executor/process/worker.py b/lib/ansible/executor/process/worker.py index 6c26aed164c..f5e7b979f42 100644 --- a/lib/ansible/executor/process/worker.py +++ b/lib/ansible/executor/process/worker.py @@ -47,11 +47,11 @@ class WorkerQueue(Queue): class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defined] - ''' + """ The worker thread class, which uses TaskExecutor to run tasks read from a job queue and pushes results into a results queue for reading later. 
- ''' + """ def __init__(self, final_q, task_vars, host, task, play_context, loader, variable_manager, shared_loader_obj, worker_id): @@ -91,13 +91,13 @@ class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defin self._new_stdin = open(os.devnull) def start(self): - ''' + """ multiprocessing.Process replaces the worker's stdin with a new file but we wish to preserve it if it is connected to a terminal. Therefore dup a copy prior to calling the real start(), ensuring the descriptor is preserved somewhere in the new child, and make sure it is closed in the parent when start() completes. - ''' + """ self._save_stdin() # FUTURE: this lock can be removed once a more generalized pre-fork thread pause is in place @@ -108,12 +108,12 @@ class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defin self._new_stdin.close() def _hard_exit(self, e): - ''' + """ There is no safe exception to return to higher level code that does not risk an innocent try/except finding itself executing in the wrong process. All code executing above WorkerProcess.run() on the stack conceptually belongs to another program. - ''' + """ try: display.debug(u"WORKER HARD EXIT: %s" % to_text(e)) @@ -126,7 +126,7 @@ class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defin os._exit(1) def run(self): - ''' + """ Wrap _run() to ensure no possibility an errant exception can cause control to return to the StrategyBase task loop, or any other code higher in the stack. @@ -134,7 +134,7 @@ class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defin As multiprocessing in Python 2.x provides no protection, it is possible a try/except added in far-away code can cause a crashed child process to suddenly assume the role and prior state of its parent. 
- ''' + """ try: return self._run() except BaseException as e: @@ -155,11 +155,11 @@ class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defin sys.stdout = sys.stderr = open(os.devnull, 'w') def _run(self): - ''' + """ Called when the process is started. Pushes the result onto the results queue. We also remove the host from the blocked hosts list, to signify that they are ready for their next task. - ''' + """ # import cProfile, pstats, StringIO # pr = cProfile.Profile() diff --git a/lib/ansible/executor/stats.py b/lib/ansible/executor/stats.py index a7cc713b27d..acedf10759f 100644 --- a/lib/ansible/executor/stats.py +++ b/lib/ansible/executor/stats.py @@ -23,7 +23,7 @@ from ansible.utils.vars import merge_hash class AggregateStats: - ''' holds stats about per-host activity during playbook runs ''' + """ holds stats about per-host activity during playbook runs """ def __init__(self): @@ -40,7 +40,7 @@ class AggregateStats: self.custom = {} def increment(self, what, host): - ''' helper function to bump a statistic ''' + """ helper function to bump a statistic """ self.processed[host] = 1 prev = (getattr(self, what)).get(host, 0) @@ -57,7 +57,7 @@ class AggregateStats: _what[host] = 0 def summarize(self, host): - ''' return information about a particular host ''' + """ return information about a particular host """ return dict( ok=self.ok.get(host, 0), @@ -70,7 +70,7 @@ class AggregateStats: ) def set_custom_stats(self, which, what, host=None): - ''' allow setting of a custom stat''' + """ allow setting of a custom stat""" if host is None: host = '_run' @@ -80,7 +80,7 @@ class AggregateStats: self.custom[host][which] = what def update_custom_stats(self, which, what, host=None): - ''' allow aggregation of a custom stat''' + """ allow aggregation of a custom stat""" if host is None: host = '_run' diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py index 9c21a6c1675..ff1c33871f2 100644 --- 
a/lib/ansible/executor/task_executor.py +++ b/lib/ansible/executor/task_executor.py @@ -21,6 +21,7 @@ from ansible.module_utils.parsing.convert_bool import boolean from ansible.module_utils.six import binary_type from ansible.module_utils.common.text.converters import to_text, to_native from ansible.module_utils.connection import write_to_stream +from ansible.module_utils.six import string_types from ansible.playbook.conditional import Conditional from ansible.playbook.task import Task from ansible.plugins import get_plugin_class @@ -31,7 +32,7 @@ from ansible.utils.listify import listify_lookup_plugin_terms from ansible.utils.unsafe_proxy import to_unsafe_text, wrap_var from ansible.vars.clean import namespace_facts, clean_facts from ansible.utils.display import Display -from ansible.utils.vars import combine_vars, isidentifier +from ansible.utils.vars import combine_vars display = Display() @@ -60,10 +61,10 @@ def task_timeout(signum, frame): def remove_omit(task_args, omit_token): - ''' + """ Remove args with a value equal to the ``omit_token`` recursively to align with now having suboptions in the argument_spec - ''' + """ if not isinstance(task_args, dict): return task_args @@ -84,12 +85,12 @@ def remove_omit(task_args, omit_token): class TaskExecutor: - ''' + """ This is the main worker class for the executor pipeline, which handles loading an action plugin to actually dispatch the task to a given host. This class roughly corresponds to the old Runner() class. - ''' + """ def __init__(self, host, task, job_vars, play_context, new_stdin, loader, shared_loader_obj, final_q, variable_manager): self._host = host @@ -107,12 +108,12 @@ class TaskExecutor: self._task.squash() def run(self): - ''' + """ The main executor entrypoint, where we determine if the specified task requires looping and either runs the task with self._run_loop() or self._execute(). After that, the returned results are parsed and returned as a dict. 
- ''' + """ display.debug("in run() - task %s" % self._task._uuid) @@ -149,6 +150,7 @@ class TaskExecutor: if 'unreachable' in item and item['unreachable']: item_ignore_unreachable = item.pop('_ansible_ignore_unreachable') if not res.get('unreachable'): + res['unreachable'] = True self._task.ignore_unreachable = item_ignore_unreachable elif self._task.ignore_unreachable and not item_ignore_unreachable: self._task.ignore_unreachable = item_ignore_unreachable @@ -217,10 +219,10 @@ class TaskExecutor: display.debug(u"error closing connection: %s" % to_text(e)) def _get_loop_items(self): - ''' + """ Loads a lookup plugin to handle the with_* portion of a task (if specified), and returns the items result. - ''' + """ # get search path for this task to pass to lookup plugins self._job_vars['ansible_search_path'] = self._task.get_search_path() @@ -265,11 +267,11 @@ class TaskExecutor: return items def _run_loop(self, items): - ''' + """ Runs the task with the loop items specified and collates the result into an array named 'results' which is inserted into the final result along with the item for which the loop ran. - ''' + """ task_vars = self._job_vars templar = Templar(loader=self._loader, variables=task_vars) @@ -342,6 +344,13 @@ class TaskExecutor: (self._task, tmp_task) = (tmp_task, self._task) (self._play_context, tmp_play_context) = (tmp_play_context, self._play_context) res = self._execute(variables=task_vars) + + if self._task.register: + # Ensure per loop iteration results are registered in case `_execute()` + # returns early (when conditional, failure, ...). + # This is needed in case the registered variable is used in the loop label template. 
+ task_vars[self._task.register] = res + task_fields = self._task.dump_attrs() (self._task, tmp_task) = (tmp_task, self._task) (self._play_context, tmp_play_context) = (tmp_play_context, self._play_context) @@ -372,12 +381,17 @@ class TaskExecutor: 'msg': 'Failed to template loop_control.label: %s' % to_text(e) }) + # if plugin is loaded, get resolved name, otherwise leave original task connection + if self._connection and not isinstance(self._connection, string_types): + task_fields['connection'] = getattr(self._connection, 'ansible_name') + tr = TaskResult( self._host.name, self._task._uuid, res, task_fields=task_fields, ) + if tr.is_failed() or tr.is_unreachable(): self._final_q.send_callback('v2_runner_item_on_failed', tr) elif tr.is_skipped(): @@ -389,6 +403,19 @@ class TaskExecutor: self._final_q.send_callback('v2_runner_item_on_ok', tr) results.append(res) + + # break loop if break_when conditions are met + if self._task.loop_control and self._task.loop_control.break_when: + cond = Conditional(loader=self._loader) + cond.when = self._task.loop_control.get_validated_value( + 'break_when', self._task.loop_control.fattributes.get('break_when'), self._task.loop_control.break_when, templar + ) + if cond.evaluate_conditional(templar, task_vars): + # delete loop vars before exiting loop + del task_vars[loop_var] + break + + # done with loop var, remove for next iteration del task_vars[loop_var] # clear 'connection related' plugin variables for next iteration @@ -426,11 +453,11 @@ class TaskExecutor: variables.update(delegated_vars) def _execute(self, variables=None): - ''' + """ The primary workhorse of the executor system, this runs the task on the specified host (which may be the delegated_to host) and handles the retry/until and block rescue/always execution - ''' + """ if variables is None: variables = self._job_vars @@ -650,7 +677,7 @@ class TaskExecutor: return dict(unreachable=True, msg=to_text(e)) except TaskTimeoutError as e: msg = 'The %s action failed to 
execute in the expected time frame (%d) and was terminated' % (self._task.action, self._task.timeout) - return dict(failed=True, msg=msg, timedout=e.frame) + return dict(failed=True, msg=msg, timedout={'frame': e.frame, 'period': self._task.timeout}) finally: if self._task.timeout: signal.alarm(0) @@ -658,8 +685,8 @@ self._handler.cleanup() display.debug("handler run complete") - # preserve no log - result["_ansible_no_log"] = no_log + # propagate no log to result - the action can set this, so only overwrite it with the task's value if missing or falsy + result["_ansible_no_log"] = bool(no_log or result.get('_ansible_no_log', False)) if self._task.action not in C._ACTION_WITH_CLEAN_FACTS: result = wrap_var(result) @@ -667,9 +694,6 @@ # update the local copy of vars with the registered value, if specified, # or any facts which may have been generated by the module execution if self._task.register: - if not isidentifier(self._task.register): - raise AnsibleError("Invalid variable name in 'register' specified: '%s'" % self._task.register) - vars_copy[self._task.register] = result if self._task.async_val > 0: @@ -835,9 +859,9 @@ return result def _poll_async_result(self, result, templar, task_vars=None): - ''' + """ Polls for the specified JID to be complete - ''' + """ if task_vars is None: task_vars = self._job_vars @@ -953,10 +977,10 @@ return become def _get_connection(self, cvars, templar, current_connection): - ''' + """ Reads the connection property for the host, and returns the correct connection object from the list of connection plugins - ''' + """ self._play_context.connection = current_connection @@ -1058,7 +1082,7 @@ # add extras if plugin supports them if getattr(self._connection, 'allow_extras', False): for k in variables: - if k.startswith('ansible_%s_' % self._connection._load_name) and k not in options: + if k.startswith('ansible_%s_' %
self._connection.extras_prefix) and k not in options: options['_extras'][k] = templar.template(variables[k]) task_keys = self._task.dump_attrs() @@ -1111,15 +1135,15 @@ class TaskExecutor: return varnames def _get_action_handler(self, templar): - ''' + """ Returns the correct action plugin to handle the requestion task action - ''' + """ return self._get_action_handler_with_module_context(templar)[0] def _get_action_handler_with_module_context(self, templar): - ''' + """ Returns the correct action plugin to handle the requestion task action and the module context - ''' + """ module_collection, separator, module_name = self._task.action.rpartition(".") module_prefix = module_name.split('_')[0] if module_collection: @@ -1193,9 +1217,9 @@ CLI_STUB_NAME = 'ansible_connection_cli_stub.py' def start_connection(play_context, options, task_uuid): - ''' + """ Starts the persistent connection - ''' + """ env = os.environ.copy() env.update({ diff --git a/lib/ansible/executor/task_queue_manager.py b/lib/ansible/executor/task_queue_manager.py index f6e8c8bf7e0..75f8a698612 100644 --- a/lib/ansible/executor/task_queue_manager.py +++ b/lib/ansible/executor/task_queue_manager.py @@ -112,7 +112,7 @@ class AnsibleEndPlay(Exception): class TaskQueueManager: - ''' + """ This class handles the multiprocessing requirements of Ansible by creating a pool of worker forks, a result handler fork, and a manager object with shared datastructures/queues for coordinating @@ -120,7 +120,7 @@ class TaskQueueManager: The queue manager is responsible for loading the play strategy plugin, which dispatches the Play's tasks to hosts. - ''' + """ RUN_OK = 0 RUN_ERROR = 1 @@ -176,11 +176,11 @@ class TaskQueueManager: self._workers.append(None) def load_callbacks(self): - ''' + """ Loads all available callbacks, with the exception of those which utilize the CALLBACK_TYPE option. When CALLBACK_TYPE is set to 'stdout', only one such callback plugin will be loaded. 
- ''' + """ if self._callbacks_loaded: return @@ -223,7 +223,7 @@ class TaskQueueManager: callback_type = getattr(callback_plugin, 'CALLBACK_TYPE', '') callback_needs_enabled = getattr(callback_plugin, 'CALLBACK_NEEDS_ENABLED', getattr(callback_plugin, 'CALLBACK_NEEDS_WHITELIST', False)) - # try to get colleciotn world name first + # try to get collection world name first cnames = getattr(callback_plugin, '_redirected_names', []) if cnames: # store the name the plugin was loaded as, as that's what we'll need to compare to the configured callback list later @@ -269,13 +269,13 @@ class TaskQueueManager: self._callbacks_loaded = True def run(self, play): - ''' + """ Iterates over the roles/tasks in a play, using the given (or default) strategy for queueing tasks. The default is the linear strategy, which operates like classic Ansible by keeping all hosts in lock-step with a given task (meaning no hosts move on to the next task until all hosts are done with the current task). - ''' + """ if not self._callbacks_loaded: self.load_callbacks() diff --git a/lib/ansible/executor/task_result.py b/lib/ansible/executor/task_result.py index 2690f3a52bb..06e9af72e3c 100644 --- a/lib/ansible/executor/task_result.py +++ b/lib/ansible/executor/task_result.py @@ -22,11 +22,11 @@ CLEAN_EXCEPTIONS = ( class TaskResult: - ''' + """ This class is responsible for interpreting the resulting data from an executed task, and provides helper methods for determining the result of a given task. 
- ''' + """ def __init__(self, host, task, return_data, task_fields=None): self._host = host @@ -93,7 +93,7 @@ class TaskResult: return ret def _check_key(self, key): - '''get a specific key from the result or its items''' + """get a specific key from the result or its items""" if isinstance(self._result, dict) and key in self._result: return self._result.get(key, False) @@ -106,7 +106,7 @@ class TaskResult: def clean_copy(self): - ''' returns 'clean' taskresult object ''' + """ returns 'clean' taskresult object """ # FIXME: clean task_fields, _task and _host copies result = TaskResult(self._host, self._task, {}, self._task_fields) @@ -139,7 +139,7 @@ class TaskResult: elif self._result: result._result = module_response_deepcopy(self._result) - # actualy remove + # actually remove for remove_key in ignore: if remove_key in result._result: del result._result[remove_key] diff --git a/lib/ansible/galaxy/__init__.py b/lib/ansible/galaxy/__init__.py index cc961c5dd16..7b6fa569f4b 100644 --- a/lib/ansible/galaxy/__init__.py +++ b/lib/ansible/galaxy/__init__.py @@ -18,7 +18,7 @@ # along with Ansible. If not, see . 
# ######################################################################## -''' This manages remote shared Ansible objects, mainly roles''' +""" This manages remote shared Ansible objects, mainly roles""" from __future__ import annotations @@ -40,7 +40,7 @@ def get_collections_galaxy_meta_info(): class Galaxy(object): - ''' Keeps global galaxy info ''' + """ Keeps global galaxy info """ def __init__(self): # TODO: eventually remove this as it contains a mismash of properties that aren't really global diff --git a/lib/ansible/galaxy/api.py b/lib/ansible/galaxy/api.py index 156dd4cf700..6765b087b35 100644 --- a/lib/ansible/galaxy/api.py +++ b/lib/ansible/galaxy/api.py @@ -62,8 +62,7 @@ def should_retry_error(exception): if isinstance(orig_exc, URLError): orig_exc = orig_exc.reason - # Handle common URL related errors such as TimeoutError, and BadStatusLine - # Note: socket.timeout is only required for Py3.9 + # Handle common URL related errors if isinstance(orig_exc, (TimeoutError, BadStatusLine, IncompleteRead)): return True @@ -720,7 +719,7 @@ class GalaxyAPI: display.display("Waiting until Galaxy import task %s has completed" % full_url) start = time.time() - wait = 2 + wait = C.GALAXY_COLLECTION_IMPORT_POLL_INTERVAL while timeout == 0 or (time.time() - start) < timeout: try: @@ -744,7 +743,7 @@ class GalaxyAPI: time.sleep(wait) # poor man's exponential backoff algo so we don't flood the Galaxy API, cap at 30 seconds. 
- wait = min(30, wait * 1.5) + wait = min(30, wait * C.GALAXY_COLLECTION_IMPORT_POLL_FACTOR) if state == 'waiting': raise AnsibleError("Timeout while waiting for the Galaxy import process to finish, check progress at '%s'" % to_native(full_url)) diff --git a/lib/ansible/galaxy/collection/__init__.py b/lib/ansible/galaxy/collection/__init__.py index d2d8ae84713..829f7aa19d2 100644 --- a/lib/ansible/galaxy/collection/__init__.py +++ b/lib/ansible/galaxy/collection/__init__.py @@ -126,13 +126,13 @@ from ansible.galaxy.dependency_resolution.dataclasses import ( from ansible.galaxy.dependency_resolution.versioning import meets_requirements from ansible.plugins.loader import get_all_plugin_loaders from ansible.module_utils.common.file import S_IRWU_RG_RO, S_IRWXU_RXG_RXO, S_IXANY +from ansible.module_utils.common.sentinel import Sentinel from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text from ansible.module_utils.common.collections import is_sequence from ansible.module_utils.common.yaml import yaml_dump from ansible.utils.collection_loader import AnsibleCollectionRef from ansible.utils.display import Display from ansible.utils.hashing import secure_hash, secure_hash_s -from ansible.utils.sentinel import Sentinel display = Display() @@ -1602,13 +1602,6 @@ def install_artifact(b_coll_targz_path, b_collection_path, b_temp_path, signatur """ try: with tarfile.open(b_coll_targz_path, mode='r') as collection_tar: - # Remove this once py3.11 is our controller minimum - # Workaround for https://bugs.python.org/issue47231 - # See _extract_tar_dir - collection_tar._ansible_normalized_cache = { - m.name.removesuffix(os.path.sep): m for m in collection_tar.getmembers() - } # deprecated: description='TarFile member index' core_version='2.18' python_version='3.11' - # Verify the signature on the MANIFEST.json before extracting anything else _extract_tar_file(collection_tar, MANIFEST_FILENAME, b_collection_path, b_temp_path) @@ -1689,10 +1682,10 @@ 
def install_src(collection, b_collection_path, b_collection_output_path, artifac def _extract_tar_dir(tar, dirname, b_dest): """ Extracts a directory from a collection tar. """ - dirname = to_native(dirname, errors='surrogate_or_strict').removesuffix(os.path.sep) + dirname = to_native(dirname, errors='surrogate_or_strict') try: - tar_member = tar._ansible_normalized_cache[dirname] + tar_member = tar.getmember(dirname) except KeyError: raise AnsibleError("Unable to extract '%s' from collection" % dirname) diff --git a/lib/ansible/galaxy/collection/concrete_artifact_manager.py b/lib/ansible/galaxy/collection/concrete_artifact_manager.py index a67138fd2f4..fb807766f5c 100644 --- a/lib/ansible/galaxy/collection/concrete_artifact_manager.py +++ b/lib/ansible/galaxy/collection/concrete_artifact_manager.py @@ -10,6 +10,7 @@ import os import tarfile import subprocess import typing as t +import yaml from contextlib import contextmanager from hashlib import sha256 @@ -24,6 +25,7 @@ if t.TYPE_CHECKING: ) from ansible.galaxy.token import GalaxyToken +from ansible import context from ansible.errors import AnsibleError from ansible.galaxy import get_collections_galaxy_meta_info from ansible.galaxy.api import should_retry_error @@ -33,12 +35,12 @@ from ansible.module_utils.common.text.converters import to_bytes, to_native, to_ from ansible.module_utils.api import retry_with_delays_and_condition from ansible.module_utils.api import generate_jittered_backoff from ansible.module_utils.common.process import get_bin_path +from ansible.module_utils.common.sentinel import Sentinel from ansible.module_utils.common.yaml import yaml_load from ansible.module_utils.urls import open_url from ansible.utils.display import Display -from ansible.utils.sentinel import Sentinel -import yaml +import ansible.constants as C display = Display() @@ -61,7 +63,7 @@ class ConcreteArtifactsManager: """ def __init__(self, b_working_directory, validate_certs=True, keyring=None, timeout=60, 
required_signature_count=None, ignore_signature_errors=None): # type: (bytes, bool, str, int, str, list[str]) -> None - """Initialize ConcreteArtifactsManager caches and costraints.""" + """Initialize ConcreteArtifactsManager caches and constraints.""" self._validate_certs = validate_certs # type: bool self._artifact_cache = {} # type: dict[bytes, bytes] self._galaxy_artifact_cache = {} # type: dict[Candidate | Requirement, bytes] @@ -425,11 +427,14 @@ def _extract_collection_from_git(repo_url, coll_ver, b_path): # Perform a shallow clone if simply cloning HEAD if version == 'HEAD': - git_clone_cmd = git_executable, 'clone', '--depth=1', git_url, to_text(b_checkout_path) + git_clone_cmd = [git_executable, 'clone', '--depth=1', git_url, to_text(b_checkout_path)] else: - git_clone_cmd = git_executable, 'clone', git_url, to_text(b_checkout_path) + git_clone_cmd = [git_executable, 'clone', git_url, to_text(b_checkout_path)] # FIXME: '--branch', version + if context.CLIARGS['ignore_certs'] or C.GALAXY_IGNORE_CERTS: + git_clone_cmd.extend(['-c', 'http.sslVerify=false']) + try: subprocess.check_call(git_clone_cmd) except subprocess.CalledProcessError as proc_err: diff --git a/lib/ansible/galaxy/collection/galaxy_api_proxy.py b/lib/ansible/galaxy/collection/galaxy_api_proxy.py index 0c1b7df0bec..046354a395d 100644 --- a/lib/ansible/galaxy/collection/galaxy_api_proxy.py +++ b/lib/ansible/galaxy/collection/galaxy_api_proxy.py @@ -27,8 +27,7 @@ display = Display() class MultiGalaxyAPIProxy: """A proxy that abstracts talking to multiple Galaxy instances.""" - def __init__(self, apis, concrete_artifacts_manager, offline=False): - # type: (t.Iterable[GalaxyAPI], ConcreteArtifactsManager, bool) -> None + def __init__(self, apis: t.Iterable[GalaxyAPI], concrete_artifacts_manager: ConcreteArtifactsManager, offline: bool = False) -> None: """Initialize the target APIs list.""" self._apis = apis self._concrete_art_mgr = concrete_artifacts_manager @@ -38,22 +37,21 @@ class 
MultiGalaxyAPIProxy: def is_offline_mode_requested(self): return self._offline - def _assert_that_offline_mode_is_not_requested(self): # type: () -> None + def _assert_that_offline_mode_is_not_requested(self) -> None: if self.is_offline_mode_requested: raise NotImplementedError("The calling code is not supposed to be invoked in 'offline' mode.") - def _get_collection_versions(self, requirement): - # type: (Requirement) -> t.Iterator[tuple[GalaxyAPI, str]] + def _get_collection_versions(self, requirement: Requirement) -> t.Iterator[tuple[GalaxyAPI, str]]: """Helper for get_collection_versions. Yield api, version pairs for all APIs, and reraise the last error if no valid API was found. """ if self._offline: - return [] + return found_api = False - last_error = None # type: Exception | None + last_error: Exception | None = None api_lookup_order = ( (requirement.src, ) @@ -86,8 +84,7 @@ class MultiGalaxyAPIProxy: if not found_api and last_error is not None: raise last_error - def get_collection_versions(self, requirement): - # type: (Requirement) -> t.Iterable[tuple[str, GalaxyAPI]] + def get_collection_versions(self, requirement: Requirement) -> t.Iterable[tuple[str, GalaxyAPI]]: """Get a set of unique versions for FQCN on Galaxy servers.""" if requirement.is_concrete_artifact: return { @@ -110,8 +107,7 @@ class MultiGalaxyAPIProxy: ) ) - def get_collection_version_metadata(self, collection_candidate): - # type: (Candidate) -> CollectionVersionMetadata + def get_collection_version_metadata(self, collection_candidate: Candidate) -> CollectionVersionMetadata: """Retrieve collection metadata of a given candidate.""" self._assert_that_offline_mode_is_not_requested() @@ -160,8 +156,7 @@ class MultiGalaxyAPIProxy: raise last_err - def get_collection_dependencies(self, collection_candidate): - # type: (Candidate) -> dict[str, str] + def get_collection_dependencies(self, collection_candidate: Candidate) -> dict[str, str]: # FIXME: return Requirement instances instead? 
"""Retrieve collection dependencies of a given candidate.""" if collection_candidate.is_concrete_artifact: @@ -177,13 +172,12 @@ class MultiGalaxyAPIProxy: dependencies ) - def get_signatures(self, collection_candidate): - # type: (Candidate) -> list[str] + def get_signatures(self, collection_candidate: Candidate) -> list[str]: self._assert_that_offline_mode_is_not_requested() namespace = collection_candidate.namespace name = collection_candidate.name version = collection_candidate.ver - last_err = None # type: Exception | None + last_err: Exception | None = None api_lookup_order = ( (collection_candidate.src, ) diff --git a/lib/ansible/galaxy/dependency_resolution/providers.py b/lib/ansible/galaxy/dependency_resolution/providers.py index 716f5423b37..d336c3441e2 100644 --- a/lib/ansible/galaxy/dependency_resolution/providers.py +++ b/lib/ansible/galaxy/dependency_resolution/providers.py @@ -39,7 +39,7 @@ except ImportError: # TODO: add python requirements to ansible-test's ansible-core distribution info and remove the hardcoded lowerbound/upperbound fallback RESOLVELIB_LOWERBOUND = SemanticVersion("0.5.3") -RESOLVELIB_UPPERBOUND = SemanticVersion("1.1.0") +RESOLVELIB_UPPERBOUND = SemanticVersion("2.0.0") RESOLVELIB_VERSION = SemanticVersion.from_loose_version(LooseVersion(resolvelib_version)) @@ -126,7 +126,7 @@ class CollectionDependencyProviderBase(AbstractProvider): the current candidate list * ``parent`` specifies the candidate that provides - (dependend on) the requirement, or `None` + (depended on) the requirement, or `None` to indicate a root requirement. resolvelib >=0.7.0, < 0.8.0 @@ -202,7 +202,7 @@ class CollectionDependencyProviderBase(AbstractProvider): remote archives), the one-and-only match is returned For a "named" requirement, Galaxy-compatible APIs are consulted - to find concrete candidates for this requirement. Of theres a + to find concrete candidates for this requirement. 
If there's a pre-installed candidate, it's prepended in front of others. resolvelib >=0.5.3, <0.6.0 @@ -437,7 +437,7 @@ class CollectionDependencyProviderBase(AbstractProvider): # FIXME: differs. So how do we resolve this case? Priority? # FIXME: Taking into account a pinned hash? Exploding on # FIXME: any differences? - # NOTE: The underlying implmentation currently uses first found + # NOTE: The underlying implementation currently uses first found req_map = self._api_proxy.get_collection_dependencies(candidate) # NOTE: This guard expression MUST perform an early exit only diff --git a/lib/ansible/galaxy/role.py b/lib/ansible/galaxy/role.py index d00b8a69980..9ee7f3b9054 100644 --- a/lib/ansible/galaxy/role.py +++ b/lib/ansible/galaxy/role.py @@ -185,13 +185,11 @@ class GalaxyRole(object): info_path = os.path.join(self.path, self.META_INSTALL) if os.path.isfile(info_path): try: - f = open(info_path, 'r') - self._install_info = yaml_load(f) + with open(info_path, 'r') as f: + self._install_info = yaml_load(f) except Exception: display.vvvvv("Unable to load Galaxy install info for %s" % self.name) return False - finally: - f.close() return self._install_info @property @@ -256,7 +254,7 @@ class GalaxyRole(object): display.display("- downloading role from %s" % archive_url) try: - url_file = open_url(archive_url, validate_certs=self._validate_certs, http_agent=user_agent()) + url_file = open_url(archive_url, validate_certs=self._validate_certs, http_agent=user_agent(), timeout=60) temp_file = tempfile.NamedTemporaryFile(delete=False) data = url_file.read() while data: @@ -470,12 +468,10 @@ class GalaxyRole(object): meta_path = os.path.join(self.path, meta_requirements) if os.path.isfile(meta_path): try: - f = open(meta_path, 'r') - self._requirements = yaml_load(f) + with open(meta_path, 'r') as f: + self._requirements = yaml_load(f) except Exception: display.vvvvv("Unable to load requirements for %s" % self.name) - finally: - f.close() break diff --git 
a/lib/ansible/galaxy/token.py b/lib/ansible/galaxy/token.py index 183e2af109e..9b82ad6c62c 100644 --- a/lib/ansible/galaxy/token.py +++ b/lib/ansible/galaxy/token.py @@ -21,12 +21,16 @@ from __future__ import annotations import base64 -import os import json +import os +import time from stat import S_IRUSR, S_IWUSR +from urllib.error import HTTPError from ansible import constants as C +from ansible.galaxy.api import GalaxyError from ansible.galaxy.user_agent import user_agent +from ansible.module_utils.common.sentinel import Sentinel as NoTokenSentinel from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text from ansible.module_utils.common.yaml import yaml_dump, yaml_load from ansible.module_utils.urls import open_url @@ -35,17 +39,11 @@ from ansible.utils.display import Display display = Display() -class NoTokenSentinel(object): - """ Represents an ansible.cfg server with not token defined (will ignore cmdline and GALAXY_TOKEN_PATH. """ - def __new__(cls, *args, **kwargs): - return cls - - class KeycloakToken(object): - '''A token granted by a Keycloak server. + """A token granted by a Keycloak server. 
Like sso.redhat.com as used by cloud.redhat.com - ie Automation Hub''' + ie Automation Hub""" token_type = 'Bearer' @@ -57,12 +55,16 @@ class KeycloakToken(object): self.client_id = client_id if self.client_id is None: self.client_id = 'cloud-services' + self._expiration = None def _form_payload(self): return 'grant_type=refresh_token&client_id=%s&refresh_token=%s' % (self.client_id, self.access_token) def get(self): + if self._expiration and time.time() >= self._expiration: + self._token = None + if self._token: return self._token @@ -76,15 +78,20 @@ class KeycloakToken(object): # or 'azp' (Authorized party - the party to which the ID Token was issued) payload = self._form_payload() - resp = open_url(to_native(self.auth_url), - data=payload, - validate_certs=self.validate_certs, - method='POST', - http_agent=user_agent()) + try: + resp = open_url(to_native(self.auth_url), + data=payload, + validate_certs=self.validate_certs, + method='POST', + http_agent=user_agent()) + except HTTPError as e: + raise GalaxyError(e, 'Unable to get access token') - # TODO: handle auth errors + data = json.load(resp) - data = json.loads(to_text(resp.read(), errors='surrogate_or_strict')) + # So that we have a buffer, expire the token in ~2/3 the given value + expires_in = data['expires_in'] // 3 * 2 + self._expiration = time.time() + expires_in # - extract 'access_token' self._token = data.get('access_token') @@ -98,7 +105,7 @@ class KeycloakToken(object): class GalaxyToken(object): - ''' Class to storing and retrieving local galaxy token ''' + """ Class to storing and retrieving local galaxy token """ token_type = 'Token' diff --git a/lib/ansible/inventory/data.py b/lib/ansible/inventory/data.py index 7282d6fb751..691ad5bed42 100644 --- a/lib/ansible/inventory/data.py +++ b/lib/ansible/inventory/data.py @@ -101,7 +101,7 @@ class InventoryData(object): return new_host def reconcile_inventory(self): - ''' Ensure inventory basic rules, run after updates ''' + """ Ensure inventory basic 
rules, run after updates """ display.debug('Reconcile groups and hosts in inventory.') self.current_source = None @@ -145,7 +145,7 @@ class InventoryData(object): self._groups_dict_cache = {} def get_host(self, hostname): - ''' fetch host object using name deal with implicit localhost ''' + """ fetch host object using name deal with implicit localhost """ matching_host = self.hosts.get(hostname, None) @@ -157,7 +157,7 @@ class InventoryData(object): return matching_host def add_group(self, group): - ''' adds a group to inventory if not there already, returns named actually used ''' + """ adds a group to inventory if not there already, returns named actually used """ if group: if not isinstance(group, string_types): @@ -188,7 +188,7 @@ class InventoryData(object): h.remove_group(group) def add_host(self, host, group=None, port=None): - ''' adds a host to inventory and possibly a group if not there already ''' + """ adds a host to inventory and possibly a group if not there already """ if host: if not isinstance(host, string_types): @@ -242,7 +242,7 @@ class InventoryData(object): g.remove_host(host) def set_variable(self, entity, varname, value): - ''' sets a variable for an inventory object ''' + """ sets a variable for an inventory object """ if entity in self.groups: inv_object = self.groups[entity] @@ -255,7 +255,7 @@ class InventoryData(object): display.debug('set %s for %s' % (varname, entity)) def add_child(self, group, child): - ''' Add host or group to group ''' + """ Add host or group to group """ added = False if group in self.groups: g = self.groups[group] diff --git a/lib/ansible/inventory/group.py b/lib/ansible/inventory/group.py index 73c913ac9ee..335f60127c3 100644 --- a/lib/ansible/inventory/group.py +++ b/lib/ansible/inventory/group.py @@ -59,7 +59,7 @@ class InventoryObjectType(Enum): class Group: - ''' a group of ansible hosts ''' + """ a group of ansible hosts """ base_type = InventoryObjectType.GROUP # __slots__ = [ 'name', 'hosts', 'vars', 
'child_groups', 'parent_groups', 'depth', '_hosts_cache' ] @@ -120,7 +120,7 @@ class Group: self.parent_groups.append(g) def _walk_relationship(self, rel, include_self=False, preserve_ordering=False): - ''' + """ Given `rel` that is an iterable property of Group, consitituting a directed acyclic graph among all groups, Returns a set of all groups in full tree @@ -132,7 +132,7 @@ class Group: | / are directed upward F Called on F, returns set of (A, B, C, D, E) - ''' + """ seen = set([]) unprocessed = set(getattr(self, rel)) if include_self: diff --git a/lib/ansible/inventory/host.py b/lib/ansible/inventory/host.py index b7aea10e726..fafa9520928 100644 --- a/lib/ansible/inventory/host.py +++ b/lib/ansible/inventory/host.py @@ -28,7 +28,7 @@ __all__ = ['Host'] class Host: - ''' a single ansible host ''' + """ a single ansible host """ base_type = InventoryObjectType.HOST # __slots__ = [ 'name', 'vars', 'groups' ] diff --git a/lib/ansible/inventory/manager.py b/lib/ansible/inventory/manager.py index 96df1f46477..ba6397f1787 100644 --- a/lib/ansible/inventory/manager.py +++ b/lib/ansible/inventory/manager.py @@ -50,7 +50,7 @@ IGNORED_EXTS = [b'%s$' % to_bytes(re.escape(x)) for x in C.INVENTORY_IGNORE_EXTS IGNORED = re.compile(b'|'.join(IGNORED_ALWAYS + IGNORED_PATTERNS + IGNORED_EXTS)) PATTERN_WITH_SUBSCRIPT = re.compile( - r'''^ + r"""^ (.+) # A pattern expression ending with... 
\[(?: # A [subscript] expression comprising: (-?[0-9]+)| # A single positive or negative number @@ -58,12 +58,12 @@ PATTERN_WITH_SUBSCRIPT = re.compile( ([0-9]*) )\] $ - ''', re.X + """, re.X ) def order_patterns(patterns): - ''' takes a list of patterns and reorders them by modifier to apply them consistently ''' + """ takes a list of patterns and reorders them by modifier to apply them consistently """ # FIXME: this goes away if we apply patterns incrementally or by groups pattern_regular = [] @@ -125,19 +125,19 @@ def split_host_pattern(pattern): # This mishandles IPv6 addresses, and is retained only for backwards # compatibility. patterns = re.findall( - to_text(r'''(?: # We want to match something comprising: + to_text(r"""(?: # We want to match something comprising: [^\s:\[\]] # (anything other than whitespace or ':[]' | # ...or... \[[^\]]*\] # a single complete bracketed expression) )+ # occurring once or more - '''), pattern, re.X + """), pattern, re.X ) return [p.strip() for p in patterns if p.strip()] class InventoryManager(object): - ''' Creates and manages inventory ''' + """ Creates and manages inventory """ def __init__(self, loader, sources=None, parse=True, cache=True): @@ -197,7 +197,7 @@ class InventoryManager(object): return self._inventory.get_host(hostname) def _fetch_inventory_plugins(self): - ''' sets up loaded inventory plugins for usage ''' + """ sets up loaded inventory plugins for usage """ display.vvvv('setting up inventory plugins') @@ -215,7 +215,7 @@ class InventoryManager(object): return plugins def parse_sources(self, cache=False): - ''' iterate over inventory sources and parse each one to populate it''' + """ iterate over inventory sources and parse each one to populate it""" parsed = False # allow for multiple inventory parsing @@ -243,7 +243,7 @@ class InventoryManager(object): host.vars = combine_vars(host.vars, get_vars_from_inventory_sources(self._loader, self._sources, [host], 'inventory')) def parse_source(self, source, 
cache=False): - ''' Generate or update inventory for the source provided ''' + """ Generate or update inventory for the source provided """ parsed = False failures = [] @@ -335,12 +335,12 @@ class InventoryManager(object): return parsed def clear_caches(self): - ''' clear all caches ''' + """ clear all caches """ self._hosts_patterns_cache = {} self._pattern_cache = {} def refresh_inventory(self): - ''' recalculate inventory ''' + """ recalculate inventory """ self.clear_caches() self._inventory = InventoryData() @@ -657,9 +657,9 @@ class InventoryManager(object): self._pattern_cache = {} def add_dynamic_host(self, host_info, result_item): - ''' + """ Helper function to add a new host to inventory based on a task result. - ''' + """ changed = False if not result_item.get('refresh'): @@ -697,10 +697,10 @@ class InventoryManager(object): result_item['changed'] = changed def add_dynamic_group(self, host, result_item): - ''' + """ Helper function to add a group (if it does not exist), and to assign the specified host to that group. 
- ''' + """ changed = False diff --git a/lib/ansible/module_utils/_internal/__init__.py b/lib/ansible/module_utils/_internal/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/lib/ansible/module_utils/_internal/_concurrent/__init__.py b/lib/ansible/module_utils/_internal/_concurrent/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/lib/ansible/module_utils/_internal/_concurrent/_daemon_threading.py b/lib/ansible/module_utils/_internal/_concurrent/_daemon_threading.py new file mode 100644 index 00000000000..0b32a062fed --- /dev/null +++ b/lib/ansible/module_utils/_internal/_concurrent/_daemon_threading.py @@ -0,0 +1,28 @@ +"""Proxy stdlib threading module that only supports non-joinable daemon threads.""" +# NB: all new local module attrs are _ prefixed to ensure an identical public attribute surface area to the module we're proxying + +from __future__ import annotations as _annotations + +import threading as _threading +import typing as _t + + +class _DaemonThread(_threading.Thread): + """ + Daemon-only Thread subclass; prevents running threads of this type from blocking interpreter shutdown and process exit. + The join() method is a no-op. 
+ """ + + def __init__(self, *args, daemon: bool | None = None, **kwargs) -> None: + super().__init__(*args, daemon=daemon or True, **kwargs) + + def join(self, timeout=None) -> None: + """ThreadPoolExecutor's atexit handler joins all queue threads before allowing shutdown; prevent them from blocking.""" + + +Thread = _DaemonThread # shadow the real Thread attr with our _DaemonThread + + +def __getattr__(name: str) -> _t.Any: + """Delegate anything not defined locally to the real `threading` module.""" + return getattr(_threading, name) diff --git a/lib/ansible/module_utils/_internal/_concurrent/_futures.py b/lib/ansible/module_utils/_internal/_concurrent/_futures.py new file mode 100644 index 00000000000..2ca493f6873 --- /dev/null +++ b/lib/ansible/module_utils/_internal/_concurrent/_futures.py @@ -0,0 +1,21 @@ +"""Utilities for concurrent code execution using futures.""" + +from __future__ import annotations + +import concurrent.futures +import types + +from . import _daemon_threading + + +class DaemonThreadPoolExecutor(concurrent.futures.ThreadPoolExecutor): + """ThreadPoolExecutor subclass that creates non-joinable daemon threads for non-blocking pool and process shutdown with abandoned threads.""" + + atc = concurrent.futures.ThreadPoolExecutor._adjust_thread_count + + # clone the base class `_adjust_thread_count` method with a copy of its globals dict + _adjust_thread_count = types.FunctionType(atc.__code__, atc.__globals__.copy(), name=atc.__name__, argdefs=atc.__defaults__, closure=atc.__closure__) + # patch the method closure's `threading` module import to use our daemon-only thread factory instead + _adjust_thread_count.__globals__.update(threading=_daemon_threading) + + del atc # don't expose this as a class attribute diff --git a/lib/ansible/module_utils/api.py b/lib/ansible/module_utils/api.py index 8f08772278e..2415c38a839 100644 --- a/lib/ansible/module_utils/api.py +++ b/lib/ansible/module_utils/api.py @@ -28,7 +28,7 @@ from __future__ import 
annotations import copy import functools import itertools -import random +import secrets import sys import time @@ -131,7 +131,7 @@ def generate_jittered_backoff(retries=10, delay_base=3, delay_threshold=60): :param delay_threshold: The maximum time in seconds for any delay. """ for retry in range(0, retries): - yield random.randint(0, min(delay_threshold, delay_base * 2 ** retry)) + yield secrets.randbelow(min(delay_threshold, delay_base * 2 ** retry)) def retry_never(exception_or_result): diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py index 154b081c78a..fbc5ea17630 100644 --- a/lib/ansible/module_utils/basic.py +++ b/lib/ansible/module_utils/basic.py @@ -6,6 +6,7 @@ from __future__ import annotations import json import sys +import typing as t # Used for determining if the system is running a new enough python version # and should only restrict on our documented minimum versions @@ -199,14 +200,14 @@ PERMS_RE = re.compile(r'^[rwxXstugo]*$') # def get_platform(): - ''' + """ **Deprecated** Use :py:func:`platform.system` directly. :returns: Name of the platform the module is running on in a native string Returns a native string that labels the platform ("Linux", "Solaris", etc). Currently, this is the result of calling :py:func:`platform.system`. - ''' + """ return platform.system() # End deprecated functions @@ -231,7 +232,7 @@ def get_all_subclasses(cls): def heuristic_log_sanitize(data, no_log_values=None): - ''' Remove strings that look like passwords from log messages ''' + """ Remove strings that look like passwords from log messages """ # Currently filters: # user:pass@foo/whatever and http://username:pass@wherever/foo # This code has false positives and consumes parts of logs that are @@ -296,7 +297,7 @@ def heuristic_log_sanitize(data, no_log_values=None): def _load_params(): - ''' read the modules parameters and store them globally. + """ read the modules parameters and store them globally. 
This function may be needed for certain very dynamic custom modules which want to process the parameters that are being handed the module. Since @@ -305,7 +306,7 @@ def _load_params(): will try not to break it gratuitously. It is certainly more future-proof to call this function and consume its outputs than to implement the logic inside it as a copy in your own code. - ''' + """ global _ANSIBLE_ARGS if _ANSIBLE_ARGS is not None: buffer = _ANSIBLE_ARGS @@ -316,9 +317,8 @@ def _load_params(): # We control the args and we pass them as utf8 if len(sys.argv) > 1: if os.path.isfile(sys.argv[1]): - fd = open(sys.argv[1], 'rb') - buffer = fd.read() - fd.close() + with open(sys.argv[1], 'rb') as fd: + buffer = fd.read() else: buffer = sys.argv[1].encode('utf-8', errors='surrogateescape') # default case, read from stdin @@ -363,13 +363,13 @@ class AnsibleModule(object): required_one_of=None, add_file_common_args=False, supports_check_mode=False, required_if=None, required_by=None): - ''' + """ Common code for quickly building an ansible module in Python (although you can write modules with anything that can return JSON). See :ref:`developing_modules_general` for a general introduction and :ref:`developing_program_flow_modules` for more detailed explanation. - ''' + """ self._name = os.path.basename(__file__) # initialize name until we can parse from options self.argument_spec = argument_spec @@ -394,7 +394,6 @@ class AnsibleModule(object): # run_command invocation self.run_command_environ_update = {} self._clean = {} - self._string_conversion_action = '' self.aliases = {} self._legal_inputs = [] @@ -516,13 +515,13 @@ class AnsibleModule(object): self.log('[DEPRECATION WARNING] %s %s' % (msg, version)) def load_file_common_arguments(self, params, path=None): - ''' + """ many modules deal with files, this encapsulates common options that the file module accepts such that it is directly available to all modules and they can share code. 
Allows to overwrite the path/dest module argument by providing path. - ''' + """ if path is None: path = params.get('path', params.get('dest', None)) @@ -635,12 +634,12 @@ class AnsibleModule(object): return (uid, gid) def find_mount_point(self, path): - ''' + """ Takes a path and returns its mount point :param path: a string type with a filesystem path :returns: the path to the mount point as a text type - ''' + """ b_path = os.path.realpath(to_bytes(os.path.expanduser(os.path.expandvars(path)), errors='surrogate_or_strict')) while not os.path.ismount(b_path): @@ -654,9 +653,8 @@ class AnsibleModule(object): NFS or other 'special' fs mount point, otherwise the return will be (False, None). """ try: - f = open('/proc/mounts', 'r') - mount_data = f.readlines() - f.close() + with open('/proc/mounts', 'r') as f: + mount_data = f.readlines() except Exception: return (False, None) @@ -1115,10 +1113,10 @@ class AnsibleModule(object): return self.set_fs_attributes_if_different(file_args, changed, diff, expand) def add_path_info(self, kwargs): - ''' + """ for results that are files, supplement the info about the file in the return path with stats about the file path. - ''' + """ path = kwargs.get('path', kwargs.get('dest', None)) if path is None: @@ -1155,10 +1153,10 @@ class AnsibleModule(object): return kwargs def _check_locale(self): - ''' + """ Uses the locale module to test the currently set locale (per the LANG and LC_CTYPE environment settings) - ''' + """ try: # setting the locale to '' uses the default locale # as it would be returned by locale.getdefaultlocale() @@ -1202,14 +1200,15 @@ class AnsibleModule(object): setattr(self, PASS_VARS[k][0], PASS_VARS[k][1]) def safe_eval(self, value, locals=None, include_exceptions=False): + # deprecated: description='no longer used in the codebase' core_version='2.21' return safe_eval(value, locals, include_exceptions) def _load_params(self): - ''' read the input and set the params attribute. 
+ """ read the input and set the params attribute. This method is for backwards compatibility. The guts of the function were moved out in 2.1 so that custom modules could read the parameters. - ''' + """ # debug overrides to read args from file or cmdline self.params = _load_params() @@ -1296,7 +1295,7 @@ class AnsibleModule(object): self._log_to_syslog(journal_msg) def _log_invocation(self): - ''' log that ansible ran the module ''' + """ log that ansible ran the module """ # TODO: generalize a separate log function and make log_invocation use it # Sanitize possible password argument when logging. log_args = dict() @@ -1349,7 +1348,7 @@ class AnsibleModule(object): return None def get_bin_path(self, arg, required=False, opt_dirs=None): - ''' + """ Find system executable in PATH. :param arg: The executable to find. @@ -1357,7 +1356,7 @@ class AnsibleModule(object): :param opt_dirs: optional list of directories to search in addition to ``PATH`` :returns: if found return full path; otherwise return original arg, unless 'warning' then return None :raises: Sysexit: if arg is not found and required=True (via fail_json) - ''' + """ bin_path = None try: @@ -1369,7 +1368,7 @@ class AnsibleModule(object): return bin_path def boolean(self, arg): - '''Convert the argument to a boolean''' + """Convert the argument to a boolean""" if arg is None: return arg @@ -1431,11 +1430,7 @@ class AnsibleModule(object): kwargs['deprecations'] = deprecations # preserve bools/none from no_log - # TODO: once python version on target high enough, dict comprh - preserved = {} - for k, v in kwargs.items(): - if v is None or isinstance(v, bool): - preserved[k] = v + preserved = {k: v for k, v in kwargs.items() if v is None or isinstance(v, bool)} # strip no_log collisions kwargs = remove_values(kwargs, self.no_log_values) @@ -1445,15 +1440,15 @@ class AnsibleModule(object): print('\n%s' % self.jsonify(kwargs)) - def exit_json(self, **kwargs): - ''' return from the module, without error ''' + def 
exit_json(self, **kwargs) -> t.NoReturn: + """ return from the module, without error """ self.do_cleanup_files() self._return_formatted(kwargs) sys.exit(0) - def fail_json(self, msg, **kwargs): - ''' return from the module, with an error message ''' + def fail_json(self, msg, **kwargs) -> t.NoReturn: + """ return from the module, with an error message """ kwargs['failed'] = True kwargs['msg'] = msg @@ -1476,7 +1471,7 @@ class AnsibleModule(object): self.fail_json(msg=to_native(e)) def digest_from_file(self, filename, algorithm): - ''' Return hex digest of local file for a digest_method specified by name, or None if file is not present. ''' + """ Return hex digest of local file for a digest_method specified by name, or None if file is not present. """ b_filename = to_bytes(filename, errors='surrogate_or_strict') if not os.path.exists(b_filename): @@ -1504,7 +1499,7 @@ class AnsibleModule(object): return digest_method.hexdigest() def md5(self, filename): - ''' Return MD5 hex digest of local file using digest_from_file(). + """ Return MD5 hex digest of local file using digest_from_file(). Do not use this function unless you have no other choice for: 1) Optional backwards compatibility @@ -1513,21 +1508,21 @@ class AnsibleModule(object): This function will not work on systems complying with FIPS-140-2. Most uses of this function can use the module.sha1 function instead. - ''' + """ if 'md5' not in AVAILABLE_HASH_ALGORITHMS: raise ValueError('MD5 not available. Possibly running in FIPS mode') return self.digest_from_file(filename, 'md5') def sha1(self, filename): - ''' Return SHA1 hex digest of local file using digest_from_file(). ''' + """ Return SHA1 hex digest of local file using digest_from_file(). """ return self.digest_from_file(filename, 'sha1') def sha256(self, filename): - ''' Return SHA-256 hex digest of local file using digest_from_file(). ''' + """ Return SHA-256 hex digest of local file using digest_from_file(). 
""" return self.digest_from_file(filename, 'sha256') def backup_local(self, fn): - '''make a date-marked backup of the specified file, return True or False on success or failure''' + """make a date-marked backup of the specified file, return True or False on success or failure""" backupdest = '' if os.path.exists(fn): @@ -1556,7 +1551,7 @@ class AnsibleModule(object): # Similar to shutil.copy(), but metadata is copied as well - in fact, # this is just shutil.copy() followed by copystat(). This is similar # to the Unix command cp -p. - # + # shutil.copystat(src, dst) # Copy the permission bits, last access time, last modification time, # and flags from src to dst. The file contents, owner, and group are @@ -1585,9 +1580,9 @@ class AnsibleModule(object): self.set_attributes_if_different(dest, current_attribs, True) def atomic_move(self, src, dest, unsafe_writes=False, keep_dest_attrs=True): - '''atomically move src to dest, copying attributes from dest, returns true on success + """atomically move src to dest, copying attributes from dest, returns true on success it uses os.rename to ensure this as it is an atomic operation, rest of the function is - to work around limitations, corner cases and ensure selinux context is saved if possible''' + to work around limitations, corner cases and ensure selinux context is saved if possible""" context = None dest_stat = None b_src = to_bytes(src, errors='surrogate_or_strict') @@ -1597,6 +1592,7 @@ class AnsibleModule(object): dest_stat = os.stat(b_dest) os.chown(b_src, dest_stat.st_uid, dest_stat.st_gid) shutil.copystat(b_dest, b_src) + os.utime(b_src, times=(time.time(), time.time())) except OSError as e: if e.errno != errno.EPERM: raise @@ -1658,8 +1654,10 @@ class AnsibleModule(object): b_tmp_dest_name, context, False) try: tmp_stat = os.stat(b_tmp_dest_name) - if keep_dest_attrs and dest_stat and (tmp_stat.st_uid != dest_stat.st_uid or tmp_stat.st_gid != dest_stat.st_gid): - os.chown(b_tmp_dest_name, dest_stat.st_uid, 
dest_stat.st_gid) + if keep_dest_attrs: + if dest_stat and (tmp_stat.st_uid != dest_stat.st_uid or tmp_stat.st_gid != dest_stat.st_gid): + os.chown(b_tmp_dest_name, dest_stat.st_uid, dest_stat.st_gid) + os.utime(b_tmp_dest_name, times=(time.time(), time.time())) except OSError as e: if e.errno != errno.EPERM: raise @@ -1685,8 +1683,12 @@ class AnsibleModule(object): umask = os.umask(0) os.umask(umask) os.chmod(b_dest, S_IRWU_RWG_RWO & ~umask) + dest_dir_stat = os.stat(os.path.dirname(b_dest)) try: - os.chown(b_dest, os.geteuid(), os.getegid()) + if dest_dir_stat.st_mode & stat.S_ISGID: + os.chown(b_dest, os.geteuid(), dest_dir_stat.st_gid) + else: + os.chown(b_dest, os.geteuid(), os.getegid()) except OSError: # We're okay with trying our best here. If the user is not # root (or old Unices) they won't be able to chown. @@ -1748,7 +1750,7 @@ class AnsibleModule(object): def run_command(self, args, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None, use_unsafe_shell=False, prompt_regex=None, environ_update=None, umask=None, encoding='utf-8', errors='surrogate_or_strict', expand_user_and_vars=True, pass_fds=None, before_communicate_callback=None, ignore_invalid_cwd=True, handle_exceptions=True): - ''' + """ Execute a command, returns rc, stdout, and stderr. The mechanism of this method for reading stdout and stderr differs from @@ -1817,7 +1819,7 @@ class AnsibleModule(object): byte strings. On python3, stdout and stderr are text strings converted according to the encoding and errors parameters. If you want byte strings on python3, use encoding=None to turn decoding to text off. 
- ''' + """ # used by clean args later on self._clean = None @@ -2023,9 +2025,8 @@ class AnsibleModule(object): def append_to_file(self, filename, str): filename = os.path.expandvars(os.path.expanduser(filename)) - fh = open(filename, 'a') - fh.write(str) - fh.close() + with open(filename, 'a') as fh: + fh.write(str) def bytes_to_human(self, size): return bytes_to_human(size) @@ -2064,13 +2065,7 @@ def get_module_path(): def __getattr__(importable_name): """Inject import-time deprecation warnings.""" - if importable_name == 'get_exception': - from ansible.module_utils.pycompat24 import get_exception - importable = get_exception - elif importable_name in {'literal_eval', '_literal_eval'}: - from ast import literal_eval - importable = literal_eval - elif importable_name == 'datetime': + if importable_name == 'datetime': import datetime importable = datetime elif importable_name == 'signal': diff --git a/lib/ansible/module_utils/common/_utils.py b/lib/ansible/module_utils/common/_utils.py index 8323e7c6432..deab1fcdf9c 100644 --- a/lib/ansible/module_utils/common/_utils.py +++ b/lib/ansible/module_utils/common/_utils.py @@ -10,7 +10,7 @@ from __future__ import annotations def get_all_subclasses(cls): - ''' + """ Recursively search and find all subclasses of a given class :arg cls: A python class @@ -21,7 +21,7 @@ def get_all_subclasses(cls): of a class exist. However, `__subclasses__` only goes one level deep. This function searches each child class's `__subclasses__` method to find all of the descendent classes. It then returns an iterable of the descendent classes. 
- ''' + """ # Retrieve direct subclasses subclasses = set(cls.__subclasses__()) to_visit = list(subclasses) diff --git a/lib/ansible/module_utils/common/collections.py b/lib/ansible/module_utils/common/collections.py index e4cb9ec1d70..28c53e14e2c 100644 --- a/lib/ansible/module_utils/common/collections.py +++ b/lib/ansible/module_utils/common/collections.py @@ -65,7 +65,7 @@ class ImmutableDict(Hashable, Mapping): def is_string(seq): - """Identify whether the input has a string-like type (inclding bytes).""" + """Identify whether the input has a string-like type (including bytes).""" # AnsibleVaultEncryptedUnicode inherits from Sequence, but is expected to be a string like object return isinstance(seq, (text_type, binary_type)) or getattr(seq, '__ENCRYPTED__', False) diff --git a/lib/ansible/module_utils/common/dict_transformations.py b/lib/ansible/module_utils/common/dict_transformations.py index 9c59d4ade47..8d318f5ef63 100644 --- a/lib/ansible/module_utils/common/dict_transformations.py +++ b/lib/ansible/module_utils/common/dict_transformations.py @@ -109,9 +109,9 @@ def _camel_to_snake(name, reversible=False): def dict_merge(a, b): - '''recursively merges dicts. not just simple a['key'] = b['key'], if + """recursively merges dicts. 
not just simple a['key'] = b['key'], if both a and b have a key whose value is a dict then dict_merge is called - on both values and the result stored in the returned dictionary.''' + on both values and the result stored in the returned dictionary.""" if not isinstance(b, dict): return b result = deepcopy(a) diff --git a/lib/ansible/module_utils/common/file.py b/lib/ansible/module_utils/common/file.py index b62e4c64f50..4c54b184111 100644 --- a/lib/ansible/module_utils/common/file.py +++ b/lib/ansible/module_utils/common/file.py @@ -7,12 +7,6 @@ import os import stat import re -try: - import selinux # pylint: disable=unused-import - HAVE_SELINUX = True -except ImportError: - HAVE_SELINUX = False - FILE_ATTRIBUTES = { 'A': 'noatime', @@ -61,7 +55,7 @@ def is_executable(path): # This method is reused by the basic module, # the repetition helps the basic module's html documentation come out right. # http://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_docstring_signature - '''is_executable(path) + """is_executable(path) is the given path executable? @@ -72,7 +66,7 @@ def is_executable(path): * Does not account for FSACLs. * Most times we really want to know "Can the current user execute this file". This function does not tell us that, only if any execute bit is set. - ''' + """ # These are all bitfields so first bitwise-or all the permissions we're # looking for, then bitwise-and with the file's mode to determine if any # execute bits are set. 
diff --git a/lib/ansible/module_utils/common/json.py b/lib/ansible/module_utils/common/json.py index 8038552e0ac..fe65a8d701c 100644 --- a/lib/ansible/module_utils/common/json.py +++ b/lib/ansible/module_utils/common/json.py @@ -42,9 +42,9 @@ def json_dump(structure): class AnsibleJSONEncoder(json.JSONEncoder): - ''' + """ Simple encoder class to deal with JSON encoding of Ansible internal types - ''' + """ def __init__(self, preprocess_unsafe=False, vault_to_text=False, **kwargs): self._preprocess_unsafe = preprocess_unsafe diff --git a/lib/ansible/module_utils/common/locale.py b/lib/ansible/module_utils/common/locale.py index 57b27a26eb8..872931ced10 100644 --- a/lib/ansible/module_utils/common/locale.py +++ b/lib/ansible/module_utils/common/locale.py @@ -7,7 +7,7 @@ from ansible.module_utils.common.text.converters import to_native def get_best_parsable_locale(module, preferences=None, raise_on_locale=False): - ''' + """ Attempts to return the best possible locale for parsing output in English useful for scraping output with i18n tools. When this raises an exception and the caller wants to continue, it should use the 'C' locale. 
@@ -17,7 +17,7 @@ def get_best_parsable_locale(module, preferences=None, raise_on_locale=False): :param raise_on_locale: boolean that determines if we raise exception or not due to locale CLI issues :returns: The first matched preferred locale or 'C' which is the default - ''' + """ found = 'C' # default posix, its ascii but always there try: diff --git a/lib/ansible/module_utils/common/parameters.py b/lib/ansible/module_utils/common/parameters.py index b9f5be43a70..c80ca6ccf16 100644 --- a/lib/ansible/module_utils/common/parameters.py +++ b/lib/ansible/module_utils/common/parameters.py @@ -96,7 +96,6 @@ PASS_VARS = { 'selinux_special_fs': ('_selinux_special_fs', ['fuse', 'nfs', 'vboxsf', 'ramfs', '9p', 'vfat']), 'shell_executable': ('_shell', '/bin/sh'), 'socket': ('_socket_path', None), - 'string_conversion_action': ('_string_conversion_action', 'warn'), 'syslog_facility': ('_syslog_facility', 'INFO'), 'tmpdir': ('_tmpdir', None), 'verbosity': ('_verbosity', 0), diff --git a/lib/ansible/module_utils/common/process.py b/lib/ansible/module_utils/common/process.py index 85ffd2195e7..eb11f8e44d1 100644 --- a/lib/ansible/module_utils/common/process.py +++ b/lib/ansible/module_utils/common/process.py @@ -10,7 +10,7 @@ from ansible.module_utils.common.warnings import deprecate def get_bin_path(arg, opt_dirs=None, required=None): - ''' + """ Find system executable in PATH. Raises ValueError if the executable is not found. :param arg: the executable to find @@ -24,7 +24,7 @@ def get_bin_path(arg, opt_dirs=None, required=None): In addition to PATH and opt_dirs, this function also looks through /sbin, /usr/sbin and /usr/local/sbin. A lot of modules, especially for gathering facts, depend on this behaviour. 
- ''' + """ if required is not None: deprecate( msg="The `required` parameter in `get_bin_path` API is deprecated.", diff --git a/lib/ansible/module_utils/common/respawn.py b/lib/ansible/module_utils/common/respawn.py index 0f57c154576..2938c86a487 100644 --- a/lib/ansible/module_utils/common/respawn.py +++ b/lib/ansible/module_utils/common/respawn.py @@ -6,6 +6,7 @@ from __future__ import annotations import os import subprocess import sys +import typing as t from ansible.module_utils.common.text.converters import to_bytes @@ -14,7 +15,7 @@ def has_respawned(): return hasattr(sys.modules['__main__'], '_respawned') -def respawn_module(interpreter_path): +def respawn_module(interpreter_path) -> t.NoReturn: """ Respawn the currently-running Ansible Python module under the specified Python interpreter. @@ -74,7 +75,7 @@ def _create_payload(): raise Exception('unable to access ansible.module_utils.basic._ANSIBLE_ARGS (not launched by AnsiballZ?)') module_fqn = sys.modules['__main__']._module_fqn modlib_path = sys.modules['__main__']._modlib_path - respawn_code_template = ''' + respawn_code_template = """ import runpy import sys @@ -89,7 +90,7 @@ if __name__ == '__main__': basic._ANSIBLE_ARGS = smuggled_args runpy.run_module(module_fqn, init_globals=dict(_respawned=True), run_name='__main__', alter_sys=True) - ''' + """ respawn_code = respawn_code_template.format(module_fqn=module_fqn, modlib_path=modlib_path, smuggled_args=smuggled_args.strip()) diff --git a/lib/ansible/module_utils/common/sentinel.py b/lib/ansible/module_utils/common/sentinel.py new file mode 100644 index 00000000000..0fdbf4ce318 --- /dev/null +++ b/lib/ansible/module_utils/common/sentinel.py @@ -0,0 +1,66 @@ +# Copyright (c) 2019 Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import annotations + + +class Sentinel: + """ + Object which can be used to mark whether an entry as being special + + A sentinel value demarcates 
a value or marks an entry as having a special meaning. In C, the + Null byte is used as a sentinel for the end of a string. In Python, None is often used as + a Sentinel in optional parameters to mean that the parameter was not set by the user. + + You should use None as a Sentinel value any Python code where None is not a valid entry. If + None is a valid entry, though, then you need to create a different value, which is the purpose + of this class. + + Example of using Sentinel as a default parameter value:: + + def confirm_big_red_button(tristate=Sentinel): + if tristate is Sentinel: + print('You must explicitly press the big red button to blow up the base') + elif tristate is True: + print('Countdown to destruction activated') + elif tristate is False: + print('Countdown stopped') + elif tristate is None: + print('Waiting for more input') + + Example of using Sentinel to tell whether a dict which has a default value has been changed:: + + values = {'one': Sentinel, 'two': Sentinel} + defaults = {'one': 1, 'two': 2} + + # [.. Other code which does things including setting a new value for 'one' ..] + values['one'] = None + # [..] + + print('You made changes to:') + for key, value in values.items(): + if value is Sentinel: + continue + print('%s: %s' % (key, value) + """ + + def __new__(cls): + """ + Return the cls itself. This makes both equality and identity True for comparing the class + to an instance of the class, preventing common usage errors. 
+ + Preferred usage:: + + a = Sentinel + if a is Sentinel: + print('Sentinel value') + + However, these are True as well, eliminating common usage errors:: + + if Sentinel is Sentinel(): + print('Sentinel value') + + if Sentinel == Sentinel(): + print('Sentinel value') + """ + return cls diff --git a/lib/ansible/module_utils/common/sys_info.py b/lib/ansible/module_utils/common/sys_info.py index 6ca451016f2..98dc3d631af 100644 --- a/lib/ansible/module_utils/common/sys_info.py +++ b/lib/ansible/module_utils/common/sys_info.py @@ -14,7 +14,7 @@ __all__ = ('get_distribution', 'get_distribution_version', 'get_platform_subclas def get_distribution(): - ''' + """ Return the name of the distribution the module is running on. :rtype: NativeString or None @@ -23,7 +23,7 @@ def get_distribution(): This function attempts to determine what distribution the code is running on and return a string representing that value. If the platform is Linux and the distribution cannot be determined, it returns ``OtherLinux``. - ''' + """ distribution = distro.id().capitalize() if platform.system() == 'Linux': @@ -38,14 +38,14 @@ def get_distribution(): def get_distribution_version(): - ''' + """ Get the version of the distribution the code is running on :rtype: NativeString or None :returns: A string representation of the version of the distribution. If it cannot determine the version, it returns an empty string. If this is not run on a Linux machine it returns None. 
- ''' + """ version = None needs_best_version = frozenset(( @@ -79,12 +79,12 @@ def get_distribution_version(): def get_distribution_codename(): - ''' + """ Return the code name for this Linux Distribution :rtype: NativeString or None :returns: A string representation of the distribution's codename or None if not a Linux distro - ''' + """ codename = None if platform.system() == 'Linux': # Until this gets merged and we update our bundled copy of distro: @@ -109,7 +109,7 @@ def get_distribution_codename(): def get_platform_subclass(cls): - ''' + """ Finds a subclass implementing desired functionality on the platform the code is running on :arg cls: Class to find an appropriate subclass for @@ -135,7 +135,7 @@ def get_platform_subclass(cls): def __new__(cls, *args, **kwargs): new_cls = get_platform_subclass(User) return super(cls, new_cls).__new__(new_cls) - ''' + """ this_platform = platform.system() distribution = get_distribution() diff --git a/lib/ansible/module_utils/common/text/converters.py b/lib/ansible/module_utils/common/text/converters.py index abef32d06d6..6bfa8470b69 100644 --- a/lib/ansible/module_utils/common/text/converters.py +++ b/lib/ansible/module_utils/common/text/converters.py @@ -278,11 +278,11 @@ def jsonify(data, **kwargs): def container_to_bytes(d, encoding='utf-8', errors='surrogate_or_strict'): - ''' Recursively convert dict keys and values to byte str + """ Recursively convert dict keys and values to byte str Specialized for json return because this only handles, lists, tuples, and dict container types (the containers that the json module returns) - ''' + """ if isinstance(d, text_type): return to_bytes(d, encoding=encoding, errors=errors) diff --git a/lib/ansible/module_utils/common/text/formatters.py b/lib/ansible/module_utils/common/text/formatters.py index 3096abec7c7..d548085c57f 100644 --- a/lib/ansible/module_utils/common/text/formatters.py +++ b/lib/ansible/module_utils/common/text/formatters.py @@ -20,6 +20,18 @@ SIZE_RANGES = { 
'B': 1, } +VALID_UNITS = { + 'B': (('byte', 'B'), ('bit', 'b')), + 'K': (('kilobyte', 'KB'), ('kilobit', 'Kb')), + 'M': (('megabyte', 'MB'), ('megabit', 'Mb')), + 'G': (('gigabyte', 'GB'), ('gigabit', 'Gb')), + 'T': (('terabyte', 'TB'), ('terabit', 'Tb')), + 'P': (('petabyte', 'PB'), ('petabit', 'Pb')), + 'E': (('exabyte', 'EB'), ('exabit', 'Eb')), + 'Z': (('zetabyte', 'ZB'), ('zetabit', 'Zb')), + 'Y': (('yottabyte', 'YB'), ('yottabit', 'Yb')), +} + def lenient_lowercase(lst): """Lowercase elements of a list. @@ -53,7 +65,8 @@ def human_to_bytes(number, default_unit=None, isbits=False): The function expects 'b' (lowercase) as a bit identifier, e.g. 'Mb'/'Kb'/etc. if 'MB'/'KB'/... is passed, the ValueError will be rased. """ - m = re.search(r'^\s*(\d*\.?\d*)\s*([A-Za-z]+)?', str(number), flags=re.IGNORECASE) + m = re.search(r'^([0-9]*\.?[0-9]+)(?:\s*([A-Za-z]+))?\s*$', str(number)) + if m is None: raise ValueError("human_to_bytes() can't interpret following string: %s" % str(number)) try: @@ -86,10 +99,13 @@ def human_to_bytes(number, default_unit=None, isbits=False): expect_message = 'expect %s%s or %s' % (range_key, unit_class, range_key) if range_key == 'B': expect_message = 'expect %s or %s' % (unit_class, unit_class_name) - - if unit_class_name in unit.lower(): + unit_group = VALID_UNITS.get(range_key, None) + if unit_group is None: + raise ValueError(f"human_to_bytes() can't interpret a valid unit for {range_key}") + isbits_flag = 1 if isbits else 0 + if unit.lower() == unit_group[isbits_flag][0]: pass - elif unit[1] != unit_class: + elif unit != unit_group[isbits_flag][1]: raise ValueError("human_to_bytes() failed to convert %s. 
Value is not a valid string (%s)" % (number, expect_message)) return int(round(num * limit)) diff --git a/lib/ansible/module_utils/common/validation.py b/lib/ansible/module_utils/common/validation.py index 69721e47f18..1098f27336e 100644 --- a/lib/ansible/module_utils/common/validation.py +++ b/lib/ansible/module_utils/common/validation.py @@ -4,6 +4,7 @@ from __future__ import annotations +import decimal import json import os import re @@ -13,10 +14,10 @@ from ansible.module_utils.common.text.converters import to_native from ansible.module_utils.common.collections import is_iterable from ansible.module_utils.common.text.converters import jsonify from ansible.module_utils.common.text.formatters import human_to_bytes +from ansible.module_utils.common.warnings import deprecate from ansible.module_utils.parsing.convert_bool import boolean from ansible.module_utils.six import ( binary_type, - integer_types, string_types, text_type, ) @@ -39,6 +40,10 @@ def count_terms(terms, parameters): def safe_eval(value, locals=None, include_exceptions=False): + deprecate( + "The safe_eval function should not be used.", + version="2.21", + ) # do not allow method calls to modules if not isinstance(value, string_types): # already templated to a datavaluestructure, perhaps? @@ -180,7 +185,7 @@ def check_required_by(requirements, parameters, options_context=None): :kwarg options_context: List of strings of parent key names if ``requirements`` are in a sub spec. - :returns: Empty dictionary or raises :class:`TypeError` if the + :returns: Empty dictionary or raises :class:`TypeError` if the check fails. 
""" result = {} @@ -190,22 +195,15 @@ def check_required_by(requirements, parameters, options_context=None): for (key, value) in requirements.items(): if key not in parameters or parameters[key] is None: continue - result[key] = [] # Support strings (single-item lists) if isinstance(value, string_types): value = [value] - for required in value: - if required not in parameters or parameters[required] is None: - result[key].append(required) - - if result: - for key, missing in result.items(): - if len(missing) > 0: - msg = "missing parameter(s) required by '%s': %s" % (key, ', '.join(missing)) - if options_context: - msg = "{0} found in {1}".format(msg, " -> ".join(options_context)) - raise TypeError(to_native(msg)) + if missing := [required for required in value if required not in parameters or parameters[required] is None]: + msg = f"missing parameter(s) required by '{key}': {', '.join(missing)}" + if options_context: + msg = f"{msg} found in {' -> '.join(options_context)}" + raise TypeError(to_native(msg)) return result @@ -415,7 +413,7 @@ def check_type_dict(value): Raises :class:`TypeError` if unable to convert to a dict - :arg value: Dict or string to convert to a dict. Accepts ``k1=v2, k2=v2``. + :arg value: Dict or string to convert to a dict. Accepts ``k1=v2, k2=v2`` or ``k1=v2 k2=v2``. 
:returns: value converted to a dictionary """ @@ -427,10 +425,14 @@ def check_type_dict(value): try: return json.loads(value) except Exception: - (result, exc) = safe_eval(value, dict(), include_exceptions=True) - if exc is not None: - raise TypeError('unable to evaluate string as dictionary') - return result + try: + result = literal_eval(value) + except Exception: + pass + else: + if isinstance(result, dict): + return result + raise TypeError('unable to evaluate string as dictionary') elif '=' in value: fields = [] field_buffer = [] @@ -457,7 +459,11 @@ def check_type_dict(value): field = ''.join(field_buffer) if field: fields.append(field) - return dict(x.split("=", 1) for x in fields) + try: + return dict(x.split("=", 1) for x in fields) + except ValueError: + # no "=" to split on: "k1=v1, k2" + raise TypeError('unable to evaluate string in the "key=value" format as dictionary') else: raise TypeError("dictionary requested, could not parse JSON or key=value") @@ -493,16 +499,15 @@ def check_type_int(value): :return: int of given value """ - if isinstance(value, integer_types): - return value - - if isinstance(value, string_types): + if not isinstance(value, int): try: - return int(value) - except ValueError: - pass - - raise TypeError('%s cannot be converted to an int' % type(value)) + if (decimal_value := decimal.Decimal(value)) != (int_value := int(decimal_value)): + raise ValueError("Significant decimal part found") + else: + value = int_value + except (decimal.DecimalException, TypeError, ValueError) as e: + raise TypeError(f'"{value!r}" cannot be converted to an int') from e + return value def check_type_float(value): @@ -514,16 +519,12 @@ def check_type_float(value): :returns: float of given value. 
""" - if isinstance(value, float): - return value - - if isinstance(value, (binary_type, text_type, int)): + if not isinstance(value, float): try: - return float(value) - except ValueError: - pass - - raise TypeError('%s cannot be converted to a float' % type(value)) + value = float(value) + except (TypeError, ValueError) as e: + raise TypeError(f'{type(value)} cannot be converted to a float') + return value def check_type_path(value,): diff --git a/lib/ansible/module_utils/compat/importlib.py b/lib/ansible/module_utils/compat/importlib.py deleted file mode 100644 index 4074f3733d0..00000000000 --- a/lib/ansible/module_utils/compat/importlib.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (c) 2020 Matt Martz -# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) - -from __future__ import annotations - -from ansible.module_utils.common.warnings import deprecate - - -def __getattr__(importable_name): - """Inject import-time deprecation warnings. - - Specifically, for ``import_module()``. - """ - if importable_name == 'import_module': - deprecate( - msg=f'The `ansible.module_utils.compat.importlib.' - f'{importable_name}` function is deprecated.', - version='2.19', - ) - from importlib import import_module - return import_module - - raise AttributeError( - f'cannot import name {importable_name !r} ' - f'has no attribute ({__file__ !s})', - ) diff --git a/lib/ansible/module_utils/compat/paramiko.py b/lib/ansible/module_utils/compat/paramiko.py index 8c84261cef8..302309cdaa8 100644 --- a/lib/ansible/module_utils/compat/paramiko.py +++ b/lib/ansible/module_utils/compat/paramiko.py @@ -11,7 +11,12 @@ PARAMIKO_IMPORT_ERR = None try: with warnings.catch_warnings(): - warnings.filterwarnings('ignore', message='Blowfish has been deprecated', category=UserWarning) + # Blowfish has been moved, but the deprecated import is used by paramiko versions older than 2.9.5. 
+ # See: https://github.com/paramiko/paramiko/pull/2039 + warnings.filterwarnings('ignore', message='Blowfish has been ', category=UserWarning) + # TripleDES has been moved, but the deprecated import is used by paramiko versions older than 3.3.2 and 3.4.1. + # See: https://github.com/paramiko/paramiko/pull/2421 + warnings.filterwarnings('ignore', message='TripleDES has been ', category=UserWarning) import paramiko # pylint: disable=unused-import # paramiko and gssapi are incompatible and raise AttributeError not ImportError # When running in FIPS mode, cryptography raises InternalError diff --git a/lib/ansible/module_utils/compat/selectors.py b/lib/ansible/module_utils/compat/selectors.py deleted file mode 100644 index 81082f3fe36..00000000000 --- a/lib/ansible/module_utils/compat/selectors.py +++ /dev/null @@ -1,32 +0,0 @@ -# (c) 2014, 2017 Toshio Kuratomi -# -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. If not, see . 
- -from __future__ import annotations - -import selectors -import sys - -from ansible.module_utils.common.warnings import deprecate - - -sys.modules['ansible.module_utils.compat.selectors'] = selectors - - -deprecate( - msg='The `ansible.module_utils.compat.selectors` module is deprecated.', - version='2.19', -) diff --git a/lib/ansible/module_utils/compat/selinux.py b/lib/ansible/module_utils/compat/selinux.py index 0900388b761..a7a19cfd63f 100644 --- a/lib/ansible/module_utils/compat/selinux.py +++ b/lib/ansible/module_utils/compat/selinux.py @@ -11,8 +11,8 @@ from ctypes import CDLL, c_char_p, c_int, byref, POINTER, get_errno try: _selinux_lib = CDLL('libselinux.so.1', use_errno=True) -except OSError: - raise ImportError('unable to load libselinux.so') +except OSError as ex: + raise ImportError('unable to load libselinux.so') from ex def _module_setup(): diff --git a/lib/ansible/module_utils/csharp/Ansible.AccessToken.cs b/lib/ansible/module_utils/csharp/Ansible.AccessToken.cs index 49fba4e5e77..a7959efb305 100644 --- a/lib/ansible/module_utils/csharp/Ansible.AccessToken.cs +++ b/lib/ansible/module_utils/csharp/Ansible.AccessToken.cs @@ -339,19 +339,47 @@ namespace Ansible.AccessToken public static IEnumerable EnumerateUserTokens(SecurityIdentifier sid, TokenAccessLevels access = TokenAccessLevels.Query) { + return EnumerateUserTokens(sid, access, (p, h) => true); + } + + public static IEnumerable EnumerateUserTokens( + SecurityIdentifier sid, + TokenAccessLevels access, + Func processFilter) + { + // We always need the Query access level so we can query the TokenUser + access |= TokenAccessLevels.Query; + foreach (System.Diagnostics.Process process in System.Diagnostics.Process.GetProcesses()) { - // We always need the Query access level so we can query the TokenUser using (process) - using (SafeNativeHandle hToken = TryOpenAccessToken(process, access | TokenAccessLevels.Query)) + using (SafeNativeHandle processHandle = 
NativeMethods.OpenProcess(ProcessAccessFlags.QueryInformation, false, (UInt32)process.Id)) { - if (hToken == null) + if (processHandle.IsInvalid) + { continue; + } - if (!sid.Equals(GetTokenUser(hToken))) + if (!processFilter(process, processHandle)) + { continue; + } + + SafeNativeHandle accessToken; + if (!NativeMethods.OpenProcessToken(processHandle, access, out accessToken)) + { + continue; + } + + using (accessToken) + { + if (!sid.Equals(GetTokenUser(accessToken))) + { + continue; + } - yield return hToken; + yield return accessToken; + } } } } @@ -440,18 +468,5 @@ namespace Ansible.AccessToken for (int i = 0; i < array.Length; i++, ptrOffset = IntPtr.Add(ptrOffset, Marshal.SizeOf(typeof(T)))) array[i] = (T)Marshal.PtrToStructure(ptrOffset, typeof(T)); } - - private static SafeNativeHandle TryOpenAccessToken(System.Diagnostics.Process process, TokenAccessLevels access) - { - try - { - using (SafeNativeHandle hProcess = OpenProcess(process.Id, ProcessAccessFlags.QueryInformation, false)) - return OpenProcessToken(hProcess, access); - } - catch (Win32Exception) - { - return null; - } - } } } diff --git a/lib/ansible/module_utils/csharp/Ansible.Basic.cs b/lib/ansible/module_utils/csharp/Ansible.Basic.cs index 085958270d7..1095042fe17 100644 --- a/lib/ansible/module_utils/csharp/Ansible.Basic.cs +++ b/lib/ansible/module_utils/csharp/Ansible.Basic.cs @@ -73,7 +73,6 @@ namespace Ansible.Basic { "selinux_special_fs", null }, { "shell_executable", null }, { "socket", null }, - { "string_conversion_action", null }, { "syslog_facility", null }, { "target_log_info", "TargetLogInfo"}, { "tmpdir", "tmpdir" }, diff --git a/lib/ansible/module_utils/csharp/Ansible.Become.cs b/lib/ansible/module_utils/csharp/Ansible.Become.cs index d3bb1564fa6..08b73d404bf 100644 --- a/lib/ansible/module_utils/csharp/Ansible.Become.cs +++ b/lib/ansible/module_utils/csharp/Ansible.Become.cs @@ -93,10 +93,21 @@ namespace Ansible.Become CachedRemoteInteractive, CachedUnlock } + + [Flags] + 
public enum ProcessChildProcessPolicyFlags + { + None = 0x0, + NoChildProcessCreation = 0x1, + AuditNoChildProcessCreation = 0x2, + AllowSecureProcessCreation = 0x4, + } } internal class NativeMethods { + public const int ProcessChildProcessPolicy = 13; + [DllImport("advapi32.dll", SetLastError = true)] public static extern bool AllocateLocallyUniqueId( out Luid Luid); @@ -116,6 +127,13 @@ namespace Ansible.Become [DllImport("kernel32.dll")] public static extern UInt32 GetCurrentThreadId(); + [DllImport("kernel32.dll", SetLastError = true)] + public static extern bool GetProcessMitigationPolicy( + SafeNativeHandle hProcess, + int MitigationPolicy, + ref NativeHelpers.ProcessChildProcessPolicyFlags lpBuffer, + IntPtr dwLength); + [DllImport("user32.dll", SetLastError = true)] public static extern NoopSafeHandle GetProcessWindowStation(); @@ -217,6 +235,7 @@ namespace Ansible.Become }; private static int WINDOWS_STATION_ALL_ACCESS = 0x000F037F; private static int DESKTOP_RIGHTS_ALL_ACCESS = 0x000F01FF; + private static bool _getProcessMitigationPolicySupported = true; public static Result CreateProcessAsUser(string username, string password, string command) { @@ -337,9 +356,9 @@ namespace Ansible.Become // account or have administrative rights on the become access token. // If we ultimately are becoming the SYSTEM account we want the token with the most privileges available. 
// https://github.com/ansible/ansible/issues/71453 - bool mostPrivileges = becomeSid == "S-1-5-18"; + bool usedForProcess = becomeSid == "S-1-5-18"; systemToken = GetPrimaryTokenForUser(new SecurityIdentifier("S-1-5-18"), - new List() { "SeTcbPrivilege" }, mostPrivileges); + new List() { "SeTcbPrivilege" }, usedForProcess); if (systemToken != null) { try @@ -425,8 +444,10 @@ namespace Ansible.Become return userTokens; } - private static SafeNativeHandle GetPrimaryTokenForUser(SecurityIdentifier sid, - List requiredPrivileges = null, bool mostPrivileges = false) + private static SafeNativeHandle GetPrimaryTokenForUser( + SecurityIdentifier sid, + List requiredPrivileges = null, + bool usedForProcess = false) { // According to CreateProcessWithTokenW we require a token with // TOKEN_QUERY, TOKEN_DUPLICATE and TOKEN_ASSIGN_PRIMARY @@ -439,7 +460,16 @@ namespace Ansible.Become SafeNativeHandle userToken = null; int privilegeCount = 0; - foreach (SafeNativeHandle hToken in TokenUtil.EnumerateUserTokens(sid, dwAccess)) + // If we are using this token for the process, we need to check the + // process mitigation policy allows child processes to be created. + var processFilter = usedForProcess + ? (Func)((p, t) => + { + return GetProcessChildProcessPolicyFlags(t) == NativeHelpers.ProcessChildProcessPolicyFlags.None; + }) + : ((p, t) => true); + + foreach (SafeNativeHandle hToken in TokenUtil.EnumerateUserTokens(sid, dwAccess, processFilter)) { // Filter out any Network logon tokens, using become with that is useless when S4U // can give us a Batch logon @@ -450,7 +480,7 @@ namespace Ansible.Become List actualPrivileges = TokenUtil.GetTokenPrivileges(hToken).Select(x => x.Name).ToList(); // If the token has less or the same number of privileges than the current token, skip it. 
- if (mostPrivileges && privilegeCount >= actualPrivileges.Count) + if (usedForProcess && privilegeCount >= actualPrivileges.Count) continue; // Check that the required privileges are on the token @@ -475,7 +505,7 @@ namespace Ansible.Become // If we don't care about getting the token with the most privileges, escape the loop as we already // have a token. - if (!mostPrivileges) + if (!usedForProcess) break; } @@ -592,6 +622,35 @@ namespace Ansible.Become return null; } + private static NativeHelpers.ProcessChildProcessPolicyFlags GetProcessChildProcessPolicyFlags(SafeNativeHandle processHandle) + { + // Because this is only used to check the policy, we ignore any + // errors and pretend that the policy is None. + NativeHelpers.ProcessChildProcessPolicyFlags policy = NativeHelpers.ProcessChildProcessPolicyFlags.None; + + if (_getProcessMitigationPolicySupported) + { + try + { + if (NativeMethods.GetProcessMitigationPolicy( + processHandle, + NativeMethods.ProcessChildProcessPolicy, + ref policy, + (IntPtr)4)) + { + return policy; + } + } + catch (EntryPointNotFoundException) + { + // If the function is not available, we won't try to call it again + _getProcessMitigationPolicySupported = false; + } + } + + return policy; + } + private static NativeHelpers.SECURITY_LOGON_TYPE GetTokenLogonType(SafeNativeHandle hToken) { TokenStatistics stats = TokenUtil.GetTokenStatistics(hToken); @@ -648,4 +707,4 @@ namespace Ansible.Become { } } } -} +} \ No newline at end of file diff --git a/lib/ansible/module_utils/distro/__init__.py b/lib/ansible/module_utils/distro/__init__.py index bed0b5a5b77..6cdb84ae505 100644 --- a/lib/ansible/module_utils/distro/__init__.py +++ b/lib/ansible/module_utils/distro/__init__.py @@ -16,13 +16,13 @@ # along with Ansible. If not, see . -''' +""" Compat distro library. 
-''' +""" from __future__ import annotations # The following makes it easier for us to script updates of the bundled code -_BUNDLED_METADATA = {"pypi_name": "distro", "version": "1.8.0"} +_BUNDLED_METADATA = {"pypi_name": "distro", "version": "1.9.0"} # The following additional changes have been made: # * Remove optparse since it is not needed for our use. diff --git a/lib/ansible/module_utils/distro/_distro.py b/lib/ansible/module_utils/distro/_distro.py index e57d6b68545..a67edae735c 100644 --- a/lib/ansible/module_utils/distro/_distro.py +++ b/lib/ansible/module_utils/distro/_distro.py @@ -1,4 +1,4 @@ -# Copyright 2015,2016,2017 Nir Cohen +# Copyright 2015-2021 Nir Cohen # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -59,7 +59,7 @@ except ImportError: # Python 3.7 TypedDict = dict -__version__ = "1.8.0" +__version__ = "1.9.0" class VersionDict(TypedDict): @@ -129,6 +129,7 @@ _DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$") # Base file names to be looked up for if _UNIXCONFDIR is not readable. _DISTRO_RELEASE_BASENAMES = [ "SuSE-release", + "altlinux-release", "arch-release", "base-release", "centos-release", @@ -155,6 +156,8 @@ _DISTRO_RELEASE_IGNORE_BASENAMES = ( "system-release", "plesk-release", "iredmail-release", + "board-release", + "ec2_version", ) @@ -247,6 +250,7 @@ def id() -> str: "rocky" Rocky Linux "aix" AIX "guix" Guix System + "altlinux" ALT Linux ============== ========================================= If you have a need to get distros for reliable IDs added into this set, @@ -995,10 +999,10 @@ class LinuxDistribution: For details, see :func:`distro.info`. 
""" - return dict( + return InfoDict( id=self.id(), version=self.version(pretty, best), - version_parts=dict( + version_parts=VersionDict( major=self.major_version(best), minor=self.minor_version(best), build_number=self.build_number(best), diff --git a/lib/ansible/module_utils/facts/ansible_collector.py b/lib/ansible/module_utils/facts/ansible_collector.py index ac81d1fd646..9fe1c8a84ee 100644 --- a/lib/ansible/module_utils/facts/ansible_collector.py +++ b/lib/ansible/module_utils/facts/ansible_collector.py @@ -39,13 +39,13 @@ from ansible.module_utils.common.collections import is_string class AnsibleFactCollector(collector.BaseFactCollector): - '''A FactCollector that returns results under 'ansible_facts' top level key. + """A FactCollector that returns results under 'ansible_facts' top level key. If a namespace if provided, facts will be collected under that namespace. For ex, a ansible.module_utils.facts.namespace.PrefixFactNamespace(prefix='ansible_') Has a 'from_gather_subset() constructor that populates collectors based on a - gather_subset specifier.''' + gather_subset specifier.""" def __init__(self, collectors=None, namespace=None, filter_spec=None): @@ -102,7 +102,7 @@ class AnsibleFactCollector(collector.BaseFactCollector): class CollectorMetaDataCollector(collector.BaseFactCollector): - '''Collector that provides a facts with the gather_subset metadata.''' + """Collector that provides a facts with the gather_subset metadata.""" name = 'gather_subset' _fact_ids = set() # type: t.Set[str] diff --git a/lib/ansible/module_utils/facts/collector.py b/lib/ansible/module_utils/facts/collector.py index 616188b3db9..f3e144f7dda 100644 --- a/lib/ansible/module_utils/facts/collector.py +++ b/lib/ansible/module_utils/facts/collector.py @@ -38,13 +38,13 @@ from ansible.module_utils.facts import timeout class CycleFoundInFactDeps(Exception): - '''Indicates there is a cycle in fact collector deps + """Indicates there is a cycle in fact collector deps If collector-B 
requires collector-A, and collector-A requires collector-B, that is a cycle. In that case, there is no ordering that will satisfy B before A and A and before B. That will cause this error to be raised. - ''' + """ pass @@ -64,9 +64,9 @@ class BaseFactCollector: required_facts = set() # type: t.Set[str] def __init__(self, collectors=None, namespace=None): - '''Base class for things that collect facts. + """Base class for things that collect facts. - 'collectors' is an optional list of other FactCollectors for composing.''' + 'collectors' is an optional list of other FactCollectors for composing.""" self.collectors = collectors or [] # self.namespace is a object with a 'transform' method that transforms @@ -88,8 +88,10 @@ class BaseFactCollector: return key_name def _transform_dict_keys(self, fact_dict): - '''update a dicts keys to use new names as transformed by self._transform_name''' + """update a dicts keys to use new names as transformed by self._transform_name""" + if fact_dict is None: + return {} for old_key in list(fact_dict.keys()): new_key = self._transform_name(old_key) # pop the item by old_key and replace it using new_key @@ -105,7 +107,7 @@ class BaseFactCollector: return facts_dict def collect(self, module=None, collected_facts=None): - '''do the fact collection + """do the fact collection 'collected_facts' is a object (a dict, likely) that holds all previously facts. This is intended to be used if a FactCollector needs to reference @@ -113,7 +115,7 @@ class BaseFactCollector: Returns a dict of facts. - ''' + """ facts_dict = {} return facts_dict @@ -123,12 +125,12 @@ def get_collector_names(valid_subsets=None, gather_subset=None, aliases_map=None, platform_info=None): - '''return a set of FactCollector names based on gather_subset spec. + """return a set of FactCollector names based on gather_subset spec. gather_subset is a spec describing which facts to gather. 
valid_subsets is a frozenset of potential matches for gather_subset ('all', 'network') etc minimal_gather_subsets is a frozenset of matches to always use, even for gather_subset='!all' - ''' + """ # Retrieve module parameters gather_subset = gather_subset or ['all'] @@ -265,11 +267,11 @@ def _get_requires_by_collector_name(collector_name, all_fact_subsets): def find_unresolved_requires(collector_names, all_fact_subsets): - '''Find any collector names that have unresolved requires + """Find any collector names that have unresolved requires Returns a list of collector names that correspond to collector classes whose .requires_facts() are not in collector_names. - ''' + """ unresolved = set() for collector_name in collector_names: @@ -349,7 +351,7 @@ def collector_classes_from_gather_subset(all_collector_classes=None, gather_subset=None, gather_timeout=None, platform_info=None): - '''return a list of collector classes that match the args''' + """return a list of collector classes that match the args""" # use gather_name etc to get the list of collectors diff --git a/lib/ansible/module_utils/facts/compat.py b/lib/ansible/module_utils/facts/compat.py index 38953140d80..7d389cbc44e 100644 --- a/lib/ansible/module_utils/facts/compat.py +++ b/lib/ansible/module_utils/facts/compat.py @@ -34,19 +34,19 @@ from ansible.module_utils.facts import ansible_collector def get_all_facts(module): - '''compat api for ansible 2.2/2.3 module_utils.facts.get_all_facts method + """compat api for ansible 2.2/2.3 module_utils.facts.get_all_facts method Expects module to be an instance of AnsibleModule, with a 'gather_subset' param. 
returns a dict mapping the bare fact name ('default_ipv4' with no 'ansible_' namespace) to - the fact value.''' + the fact value.""" gather_subset = module.params['gather_subset'] return ansible_facts(module, gather_subset=gather_subset) def ansible_facts(module, gather_subset=None): - '''Compat api for ansible 2.0/2.2/2.3 module_utils.facts.ansible_facts method + """Compat api for ansible 2.0/2.2/2.3 module_utils.facts.ansible_facts method 2.3/2.3 expects a gather_subset arg. 2.0/2.1 does not except a gather_subset arg @@ -57,7 +57,7 @@ def ansible_facts(module, gather_subset=None): returns a dict mapping the bare fact name ('default_ipv4' with no 'ansible_' namespace) to the fact value. - ''' + """ gather_subset = gather_subset or module.params.get('gather_subset', ['all']) gather_timeout = module.params.get('gather_timeout', 10) diff --git a/lib/ansible/module_utils/facts/hardware/freebsd.py b/lib/ansible/module_utils/facts/hardware/freebsd.py index c7f6c6c48b6..2ae52239632 100644 --- a/lib/ansible/module_utils/facts/hardware/freebsd.py +++ b/lib/ansible/module_utils/facts/hardware/freebsd.py @@ -224,9 +224,9 @@ class FreeBSDHardware(Hardware): return device_facts def get_dmi_facts(self): - ''' learn dmi facts from system + """ learn dmi facts from system - Use dmidecode executable if available''' + Use dmidecode executable if available""" dmi_facts = {} diff --git a/lib/ansible/module_utils/facts/hardware/linux.py b/lib/ansible/module_utils/facts/hardware/linux.py index cd0f41dcc26..f431c4e1f8c 100644 --- a/lib/ansible/module_utils/facts/hardware/linux.py +++ b/lib/ansible/module_utils/facts/hardware/linux.py @@ -24,11 +24,9 @@ import re import sys import time -from multiprocessing import cpu_count -from multiprocessing.pool import ThreadPool - -from ansible.module_utils.common.text.converters import to_text +from ansible.module_utils._internal._concurrent import _futures from ansible.module_utils.common.locale import get_best_parsable_locale +from 
ansible.module_utils.common.text.converters import to_text from ansible.module_utils.common.text.formatters import bytes_to_human from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector from ansible.module_utils.facts.utils import get_file_content, get_file_lines, get_mount_size @@ -313,10 +311,10 @@ class LinuxHardware(Hardware): return cpu_facts def get_dmi_facts(self): - ''' learn dmi facts from system + """ learn dmi facts from system Try /sys first for dmi related facts. - If that is not available, fall back to dmidecode executable ''' + If that is not available, fall back to dmidecode executable """ dmi_facts = {} @@ -425,13 +423,13 @@ class LinuxHardware(Hardware): 'NA' ) sysinfo_re = re.compile( - r''' + r""" ^ (?:Manufacturer:\s+(?P.+))| (?:Type:\s+(?P.+))| (?:Sequence\ Code:\s+0+(?P.+)) $ - ''', + """, re.VERBOSE | re.MULTILINE ) data = get_file_content('/proc/sysinfo') @@ -577,7 +575,7 @@ class LinuxHardware(Hardware): # start threads to query each mount results = {} - pool = ThreadPool(processes=min(len(mtab_entries), cpu_count())) + executor = _futures.DaemonThreadPoolExecutor() maxtime = timeout.GATHER_TIMEOUT or timeout.DEFAULT_GATHER_TIMEOUT for fields in mtab_entries: # Transform octal escape sequences @@ -601,30 +599,29 @@ class LinuxHardware(Hardware): if not self.MTAB_BIND_MOUNT_RE.match(options): mount_info['options'] += ",bind" - results[mount] = {'info': mount_info, - 'extra': pool.apply_async(self.get_mount_info, (mount, device, uuids)), - 'timelimit': time.time() + maxtime} + results[mount] = {'info': mount_info, 'timelimit': time.monotonic() + maxtime} + results[mount]['extra'] = executor.submit(self.get_mount_info, mount, device, uuids) - pool.close() # done with new workers, start gc + # done with spawning new workers, start gc + executor.shutdown() - # wait for workers and get results - while results: + while results: # wait for workers and get results for mount in list(results): done = False res = 
results[mount]['extra'] try: - if res.ready(): + if res.done(): done = True - if res.successful(): - mount_size, uuid = res.get() + if res.exception() is None: + mount_size, uuid = res.result() if mount_size: results[mount]['info'].update(mount_size) results[mount]['info']['uuid'] = uuid or 'N/A' else: # failed, try to find out why, if 'res.successful' we know there are no exceptions - results[mount]['info']['note'] = 'Could not get extra information: %s.' % (to_text(res.get())) + results[mount]['info']['note'] = f'Could not get extra information: {res.exception()}' - elif time.time() > results[mount]['timelimit']: + elif time.monotonic() > results[mount]['timelimit']: done = True self.module.warn("Timeout exceeded when getting mount info for %s" % mount) results[mount]['info']['note'] = 'Could not get extra information due to timeout' @@ -773,10 +770,24 @@ class LinuxHardware(Hardware): if serial: d['serial'] = serial - for key, test in [('removable', '/removable'), - ('support_discard', '/queue/discard_granularity'), - ]: - d[key] = get_file_content(sysdir + test) + d['removable'] = get_file_content(sysdir + '/removable') + + # Historically, `support_discard` simply returned the value of + # `/sys/block/{device}/queue/discard_granularity`. When its value + # is `0`, then the block device doesn't support discards; + # _however_, it being greater than zero doesn't necessarily mean + # that the block device _does_ support discards. + # + # Another indication that a block device doesn't support discards + # is `/sys/block/{device}/queue/discard_max_hw_bytes` being equal + # to `0` (with the same caveat as above). So if either of those are + # `0`, set `support_discard` to zero, otherwise set it to the value + # of `discard_granularity` for backwards compatibility. 
+ d['support_discard'] = ( + '0' + if get_file_content(sysdir + '/queue/discard_max_hw_bytes') == '0' + else get_file_content(sysdir + '/queue/discard_granularity') + ) if diskname in devs_wwn: d['wwn'] = devs_wwn[diskname] diff --git a/lib/ansible/module_utils/facts/hardware/openbsd.py b/lib/ansible/module_utils/facts/hardware/openbsd.py index f6765422536..b5f08c0092b 100644 --- a/lib/ansible/module_utils/facts/hardware/openbsd.py +++ b/lib/ansible/module_utils/facts/hardware/openbsd.py @@ -54,7 +54,7 @@ class OpenBSDHardware(Hardware): hardware_facts.update(self.get_dmi_facts()) hardware_facts.update(self.get_uptime_facts()) - # storage devices notorioslly prone to hang/block so they are under a timeout + # storage devices notoriously prone to hang/block so they are under a timeout try: hardware_facts.update(self.get_mount_facts()) except timeout.TimeoutError: diff --git a/lib/ansible/module_utils/facts/namespace.py b/lib/ansible/module_utils/facts/namespace.py index 3d0eb25353d..af195b21a15 100644 --- a/lib/ansible/module_utils/facts/namespace.py +++ b/lib/ansible/module_utils/facts/namespace.py @@ -33,7 +33,7 @@ class FactNamespace: self.namespace_name = namespace_name def transform(self, name): - '''Take a text name, and transforms it as needed (add a namespace prefix, etc)''' + """Take a text name, and transforms it as needed (add a namespace prefix, etc)""" return name def _underscore(self, name): diff --git a/lib/ansible/module_utils/facts/network/hpux.py b/lib/ansible/module_utils/facts/network/hpux.py index 5c8905a2763..2f01825bb24 100644 --- a/lib/ansible/module_utils/facts/network/hpux.py +++ b/lib/ansible/module_utils/facts/network/hpux.py @@ -20,7 +20,7 @@ from ansible.module_utils.facts.network.base import Network, NetworkCollector class HPUXNetwork(Network): """ - HP-UX-specifig subclass of Network. Defines networking facts: + HP-UX-specific subclass of Network. 
Defines networking facts: - default_interface - interfaces (a list of interface names) - interface_ dictionary of ipv4 address information. diff --git a/lib/ansible/module_utils/facts/network/linux.py b/lib/ansible/module_utils/facts/network/linux.py index 560cd255f37..d199d5a6ae3 100644 --- a/lib/ansible/module_utils/facts/network/linux.py +++ b/lib/ansible/module_utils/facts/network/linux.py @@ -295,8 +295,6 @@ class LinuxNetwork(Network): if not address == '::1': ips['all_ipv6_addresses'].append(address) - ip_path = self.module.get_bin_path("ip") - args = [ip_path, 'addr', 'show', 'primary', 'dev', device] rc, primary_data, stderr = self.module.run_command(args, errors='surrogate_then_replace') if rc == 0: diff --git a/lib/ansible/module_utils/facts/other/ohai.py b/lib/ansible/module_utils/facts/other/ohai.py index 8f0e4dcaecb..db62fe4d73e 100644 --- a/lib/ansible/module_utils/facts/other/ohai.py +++ b/lib/ansible/module_utils/facts/other/ohai.py @@ -25,7 +25,7 @@ from ansible.module_utils.facts.collector import BaseFactCollector class OhaiFactCollector(BaseFactCollector): - '''This is a subclass of Facts for including information gathered from Ohai.''' + """This is a subclass of Facts for including information gathered from Ohai.""" name = 'ohai' _fact_ids = set() # type: t.Set[str] diff --git a/lib/ansible/module_utils/facts/packages.py b/lib/ansible/module_utils/facts/packages.py index 21be56fab26..b5b9bcb35ef 100644 --- a/lib/ansible/module_utils/facts/packages.py +++ b/lib/ansible/module_utils/facts/packages.py @@ -3,24 +3,29 @@ from __future__ import annotations +import ansible.module_utils.compat.typing as t + from abc import ABCMeta, abstractmethod from ansible.module_utils.six import with_metaclass +from ansible.module_utils.basic import missing_required_lib from ansible.module_utils.common.process import get_bin_path +from ansible.module_utils.common.respawn import has_respawned, probe_interpreters_for_module, respawn_module from 
ansible.module_utils.common._utils import get_all_subclasses def get_all_pkg_managers(): - return {obj.__name__.lower(): obj for obj in get_all_subclasses(PkgMgr) if obj not in (CLIMgr, LibMgr)} + return {obj.__name__.lower(): obj for obj in get_all_subclasses(PkgMgr) if obj not in (CLIMgr, LibMgr, RespawningLibMgr)} class PkgMgr(with_metaclass(ABCMeta, object)): # type: ignore[misc] @abstractmethod - def is_available(self): + def is_available(self, handle_exceptions): # This method is supposed to return True/False if the package manager is currently installed/usable # It can also 'prep' the required systems in the process of detecting availability + # If handle_exceptions is false it should raise exceptions related to manager discovery instead of handling them. pass @abstractmethod @@ -58,16 +63,50 @@ class LibMgr(PkgMgr): self._lib = None super(LibMgr, self).__init__() - def is_available(self): + def is_available(self, handle_exceptions=True): found = False try: self._lib = __import__(self.LIB) found = True except ImportError: - pass + if not handle_exceptions: + raise Exception(missing_required_lib(self.LIB)) return found +class RespawningLibMgr(LibMgr): + + CLI_BINARIES = [] # type: t.List[str] + INTERPRETERS = ['/usr/bin/python3'] + + def is_available(self, handle_exceptions=True): + if super(RespawningLibMgr, self).is_available(): + return True + + for binary in self.CLI_BINARIES: + try: + bin_path = get_bin_path(binary) + except ValueError: + # Not an interesting exception to raise, just a speculative probe + continue + else: + # It looks like this package manager is installed + if not has_respawned(): + # See if respawning will help + interpreter_path = probe_interpreters_for_module(self.INTERPRETERS, self.LIB) + if interpreter_path: + respawn_module(interpreter_path) + # The module will exit when the respawned copy completes + + if not handle_exceptions: + raise Exception(f'Found executable at {bin_path}. 
{missing_required_lib(self.LIB)}') + + if not handle_exceptions: + raise Exception(missing_required_lib(self.LIB)) + + return False + + class CLIMgr(PkgMgr): CLI = None # type: str | None @@ -77,9 +116,12 @@ class CLIMgr(PkgMgr): self._cli = None super(CLIMgr, self).__init__() - def is_available(self): + def is_available(self, handle_exceptions=True): + found = False try: self._cli = get_bin_path(self.CLI) + found = True except ValueError: - return False - return True + if not handle_exceptions: + raise + return found diff --git a/lib/ansible/module_utils/facts/system/distribution.py b/lib/ansible/module_utils/facts/system/distribution.py index ee20fcb94f0..66c768a126f 100644 --- a/lib/ansible/module_utils/facts/system/distribution.py +++ b/lib/ansible/module_utils/facts/system/distribution.py @@ -30,7 +30,7 @@ def get_uname(module, flags=('-v')): def _file_exists(path, allow_empty=False): # not finding the file, exit early - if not os.path.exists(path): + if not os.path.isfile(path): return False # if just the path needs to exists (ie, it can be empty) we are done @@ -46,7 +46,7 @@ def _file_exists(path, allow_empty=False): class DistributionFiles: - '''has-a various distro file parsers (os-release, etc) and logic for finding the right one.''' + """has-a various distro file parsers (os-release, etc) and logic for finding the right one.""" # every distribution name mentioned here, must have one of # - allowempty == True # - be listed in SEARCH_STRING @@ -517,7 +517,7 @@ class Distribution(object): 'Linux Mint', 'SteamOS', 'Devuan', 'Kali', 'Cumulus Linux', 'Pop!_OS', 'Parrot', 'Pardus GNU/Linux', 'Uos', 'Deepin', 'OSMC'], 'Suse': ['SuSE', 'SLES', 'SLED', 'openSUSE', 'openSUSE Tumbleweed', - 'SLES_SAP', 'SUSE_LINUX', 'openSUSE Leap', 'ALP-Dolomite'], + 'SLES_SAP', 'SUSE_LINUX', 'openSUSE Leap', 'ALP-Dolomite', 'SL-Micro'], 'Archlinux': ['Archlinux', 'Antergos', 'Manjaro'], 'Mandrake': ['Mandrake', 'Mandriva'], 'Solaris': ['Solaris', 'Nexenta', 'OmniOS', 
'OpenIndiana', 'SmartOS'], diff --git a/lib/ansible/module_utils/facts/system/fips.py b/lib/ansible/module_utils/facts/system/fips.py index dbecd8fbaa2..131434157d4 100644 --- a/lib/ansible/module_utils/facts/system/fips.py +++ b/lib/ansible/module_utils/facts/system/fips.py @@ -1,19 +1,6 @@ +# Copyright: Contributors to the Ansible project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # Determine if a system is in 'fips' mode -# -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. If not, see . 
from __future__ import annotations @@ -30,9 +17,9 @@ class FipsFactCollector(BaseFactCollector): def collect(self, module=None, collected_facts=None): # NOTE: this is populated even if it is not set - fips_facts = {} - fips_facts['fips'] = False - data = get_file_content('/proc/sys/crypto/fips_enabled') - if data and data == '1': + fips_facts = { + 'fips': False + } + if get_file_content('/proc/sys/crypto/fips_enabled') == '1': fips_facts['fips'] = True return fips_facts diff --git a/lib/ansible/module_utils/facts/system/local.py b/lib/ansible/module_utils/facts/system/local.py index 3d656f5a345..66ec58a2e7d 100644 --- a/lib/ansible/module_utils/facts/system/local.py +++ b/lib/ansible/module_utils/facts/system/local.py @@ -1,17 +1,5 @@ -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. If not, see . 
+# Copyright: Contributors to the Ansible project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import annotations @@ -25,7 +13,6 @@ import ansible.module_utils.compat.typing as t from ansible.module_utils.common.text.converters import to_text from ansible.module_utils.facts.utils import get_file_content from ansible.module_utils.facts.collector import BaseFactCollector -from ansible.module_utils.six import PY3 from ansible.module_utils.six.moves import configparser, StringIO @@ -91,12 +78,9 @@ class LocalFactCollector(BaseFactCollector): # if that fails read it with ConfigParser cp = configparser.ConfigParser() try: - if PY3: - cp.read_file(StringIO(out)) - else: - cp.readfp(StringIO(out)) + cp.read_file(StringIO(out)) except configparser.Error: - fact = "error loading facts as JSON or ini - please check content: %s" % fn + fact = f"error loading facts as JSON or ini - please check content: {fn}" module.warn(fact) else: fact = {} @@ -104,8 +88,14 @@ class LocalFactCollector(BaseFactCollector): if sect not in fact: fact[sect] = {} for opt in cp.options(sect): - val = cp.get(sect, opt) - fact[sect][opt] = val + try: + val = cp.get(sect, opt) + except configparser.Error as ex: + fact = f"error loading facts as ini - please check content: {fn} ({ex})" + module.warn(fact) + continue + else: + fact[sect][opt] = val except Exception as e: fact = "Failed to convert (%s) to JSON: %s" % (fn, to_text(e)) module.warn(fact) diff --git a/lib/ansible/module_utils/facts/system/service_mgr.py b/lib/ansible/module_utils/facts/system/service_mgr.py index 4dfa7e99d44..20257967c1e 100644 --- a/lib/ansible/module_utils/facts/system/service_mgr.py +++ b/lib/ansible/module_utils/facts/system/service_mgr.py @@ -106,7 +106,7 @@ class ServiceMgrFactCollector(BaseFactCollector): proc_1 = proc_1.strip() if proc_1 is not None and (proc_1 == 'init' or proc_1.endswith('sh')): - # many systems return init, so this cannot be trusted, 
if it ends in 'sh' it probalby is a shell in a container + # many systems return init, so this cannot be trusted, if it ends in 'sh' it probably is a shell in a container proc_1 = None # if not init/None it should be an identifiable or custom init, so we are done! @@ -144,6 +144,8 @@ class ServiceMgrFactCollector(BaseFactCollector): service_mgr_name = 'systemd' elif os.path.exists('/etc/init.d/'): service_mgr_name = 'sysvinit' + elif os.path.exists('/etc/dinit.d/'): + service_mgr_name = 'dinit' if not service_mgr_name: # if we cannot detect, fallback to generic 'service' diff --git a/lib/ansible/module_utils/facts/system/systemd.py b/lib/ansible/module_utils/facts/system/systemd.py index 154dc73fb2a..3ba2bbfcbdf 100644 --- a/lib/ansible/module_utils/facts/system/systemd.py +++ b/lib/ansible/module_utils/facts/system/systemd.py @@ -29,19 +29,19 @@ class SystemdFactCollector(BaseFactCollector): def collect(self, module=None, collected_facts=None): systemctl_bin = module.get_bin_path("systemctl") + systemd_facts = {} if systemctl_bin and ServiceMgrFactCollector.is_systemd_managed(module): - rc, stdout, stderr = module.run_command( + rc, stdout, dummy = module.run_command( [systemctl_bin, "--version"], check_rc=False, ) - systemd_facts = {} - if rc != 0: return systemd_facts - systemd_facts["systemd"] = {} - systemd_facts["systemd"]["features"] = str(stdout.split("\n")[1]) - systemd_facts["systemd"]["version"] = int(stdout.split(" ")[1]) + systemd_facts["systemd"] = { + "features": str(stdout.split("\n")[1]), + "version": int(stdout.split(" ")[1]), + } - return systemd_facts + return systemd_facts diff --git a/lib/ansible/module_utils/facts/timeout.py b/lib/ansible/module_utils/facts/timeout.py index 5fb749fb6b3..3b0476245b8 100644 --- a/lib/ansible/module_utils/facts/timeout.py +++ b/lib/ansible/module_utils/facts/timeout.py @@ -48,7 +48,7 @@ def timeout(seconds=None, error_message="Timer expired"): return res.get(timeout_value) except multiprocessing.TimeoutError: # 
This is an ansible.module_utils.common.facts.timeout.TimeoutError - raise TimeoutError('Timer expired after %s seconds' % timeout_value) + raise TimeoutError(f'{error_message} after {timeout_value} seconds') finally: pool.terminate() diff --git a/lib/ansible/module_utils/facts/utils.py b/lib/ansible/module_utils/facts/utils.py index f7f6f19cec0..9131cd1c965 100644 --- a/lib/ansible/module_utils/facts/utils.py +++ b/lib/ansible/module_utils/facts/utils.py @@ -20,7 +20,7 @@ import os def get_file_content(path, default=None, strip=True): - ''' + """ Return the contents of a given file path :args path: path to file to return contents from @@ -28,7 +28,7 @@ def get_file_content(path, default=None, strip=True): :args strip: controls if we strip whitespace from the result or not :returns: String with file contents (optionally stripped) or 'default' value - ''' + """ data = default if os.path.exists(path) and os.access(path, os.R_OK): datafile = None @@ -62,7 +62,7 @@ def get_file_content(path, default=None, strip=True): def get_file_lines(path, strip=True, line_sep=None): - '''get list of lines from file''' + """get list of lines from file""" data = get_file_content(path, strip=strip) if data: if line_sep is None: diff --git a/lib/ansible/module_utils/json_utils.py b/lib/ansible/module_utils/json_utils.py index c6d4c7642d8..01fd2661d72 100644 --- a/lib/ansible/module_utils/json_utils.py +++ b/lib/ansible/module_utils/json_utils.py @@ -32,13 +32,13 @@ import json # pylint: disable=unused-import # NB: a copy of this function exists in ../../modules/core/async_wrapper.py. Ensure any # changes are propagated there. def _filter_non_json_lines(data, objects_only=False): - ''' + """ Used to filter unrelated output around module JSON output, like messages from tcagetattr, or where dropbear spews MOTD on every single command (which is nuts). 
Filters leading lines before first line-starting occurrence of '{' or '[', and filter all trailing lines after matching close character (working from the bottom of output). - ''' + """ warnings = [] # Filter initial junk diff --git a/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.AddType.psm1 b/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.AddType.psm1 index b18a9a1729b..3a1a317ec66 100644 --- a/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.AddType.psm1 +++ b/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.AddType.psm1 @@ -37,7 +37,7 @@ Function Add-CSharpType { .PARAMETER CompileSymbols [String[]] A list of symbols to be defined during compile time. These are added to the existing symbols, 'CORECLR', 'WINDOWS', 'UNIX' that are set - conditionalls in this cmdlet. + conditionals in this cmdlet. .NOTES The following features were added to control the compiling options from the diff --git a/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CamelConversion.psm1 b/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CamelConversion.psm1 index 9b86f84188a..fb9fb11c490 100644 --- a/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CamelConversion.psm1 +++ b/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CamelConversion.psm1 @@ -4,7 +4,7 @@ # used by Convert-DictToSnakeCase to convert a string in camelCase # format to snake_case Function Convert-StringToSnakeCase($string) { - # cope with pluralized abbreaviations such as TargetGroupARNs + # cope with pluralized abbreviations such as TargetGroupARNs if ($string -cmatch "[A-Z]{3,}s") { $replacement_string = $string -creplace $matches[0], "_$($matches[0].ToLower())" diff --git a/lib/ansible/module_utils/pycompat24.py b/lib/ansible/module_utils/pycompat24.py deleted file mode 100644 index 27d61485b2c..00000000000 --- a/lib/ansible/module_utils/pycompat24.py +++ /dev/null @@ -1,73 +0,0 @@ -# This code is part of Ansible, but is an independent component. 
-# This particular file snippet, and this file snippet only, is BSD licensed. -# Modules you write using this snippet, which is embedded dynamically by Ansible -# still belong to the author of the module, and may assign their own license -# to the complete work. -# -# Copyright (c) 2016, Toshio Kuratomi -# Copyright (c) 2015, Marius Gedminas -# -# Redistribution and use in source and binary forms, with or without modification, -# are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. -# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE -# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -from __future__ import annotations - -import sys - -from ansible.module_utils.common.warnings import deprecate - - -def get_exception(): - """Get the current exception. - - This code needs to work on Python 2.4 through 3.x, so we cannot use - "except Exception, e:" (SyntaxError on Python 3.x) nor - "except Exception as e:" (SyntaxError on Python 2.4-2.5). 
- Instead we must use :: - - except Exception: - e = get_exception() - - """ - deprecate( - msg='The `ansible.module_utils.pycompat24.get_exception` ' - 'function is deprecated.', - version='2.19', - ) - return sys.exc_info()[1] - - -def __getattr__(importable_name): - """Inject import-time deprecation warning for ``literal_eval()``.""" - if importable_name == 'literal_eval': - deprecate( - msg=f'The `ansible.module_utils.pycompat24.' - f'{importable_name}` function is deprecated.', - version='2.19', - ) - from ast import literal_eval - return literal_eval - - raise AttributeError( - f'cannot import name {importable_name !r} ' - f'has no attribute ({__file__ !s})', - ) - - -__all__ = ('get_exception', 'literal_eval') # pylint: disable=undefined-all-variable diff --git a/lib/ansible/module_utils/service.py b/lib/ansible/module_utils/service.py index 3910ea0174d..6d3ecea4b8d 100644 --- a/lib/ansible/module_utils/service.py +++ b/lib/ansible/module_utils/service.py @@ -42,13 +42,13 @@ from ansible.module_utils.common.text.converters import to_bytes, to_text def sysv_is_enabled(name, runlevel=None): - ''' + """ This function will check if the service name supplied is enabled in any of the sysv runlevels :arg name: name of the service to test for :kw runlevel: runlevel to check (default: None) - ''' + """ if runlevel: if not os.path.isdir('/etc/rc0.d/'): return bool(glob.glob('/etc/init.d/rc%s.d/S??%s' % (runlevel, name))) @@ -60,12 +60,12 @@ def sysv_is_enabled(name, runlevel=None): def get_sysv_script(name): - ''' + """ This function will return the expected path for an init script corresponding to the service name supplied. :arg name: name or path of the service to test for - ''' + """ if name.startswith('/'): result = name else: @@ -75,19 +75,19 @@ def get_sysv_script(name): def sysv_exists(name): - ''' + """ This function will return True or False depending on the existence of an init script corresponding to the service name supplied. 
:arg name: name of the service to test for - ''' + """ return os.path.exists(get_sysv_script(name)) def get_ps(module, pattern): - ''' + """ Last resort to find a service by trying to match pattern to programs in memory - ''' + """ found = False if platform.system() == 'SunOS': flags = '-ef' @@ -106,7 +106,7 @@ def get_ps(module, pattern): def fail_if_missing(module, found, service, msg=''): - ''' + """ This function will return an error or exit gracefully depending on check mode status and if the service is missing or not. @@ -114,16 +114,16 @@ def fail_if_missing(module, found, service, msg=''): :arg found: boolean indicating if services were found or not :arg service: name of service :kw msg: extra info to append to error/success msg when missing - ''' + """ if not found: module.fail_json(msg='Could not find the requested service %s: %s' % (service, msg)) def fork_process(): - ''' + """ This function performs the double fork process to detach from the parent process and execute. - ''' + """ pid = os.fork() if pid == 0: @@ -147,9 +147,7 @@ def fork_process(): os._exit(0) # get new process session and detach - sid = os.setsid() - if sid == -1: - raise Exception("Unable to detach session while daemonizing") + os.setsid() # avoid possible problems with cwd being removed os.chdir("/") @@ -162,7 +160,7 @@ def fork_process(): def daemonize(module, cmd): - ''' + """ Execute a command while detaching as a daemon, returns rc, stdout, and stderr. :arg module: is an AnsibleModule object, used for it's utility methods @@ -171,7 +169,7 @@ def daemonize(module, cmd): This is complex because daemonization is hard for people. What we do is daemonize a part of this module, the daemon runs the command, picks up the return code and output, and returns it to the main process. - ''' + """ # init some vars chunk = 4096 # FIXME: pass in as arg? 
@@ -181,10 +179,8 @@ def daemonize(module, cmd): try: pipe = os.pipe() pid = fork_process() - except OSError: + except (OSError, RuntimeError): module.fail_json(msg="Error while attempting to fork: %s", exception=traceback.format_exc()) - except Exception as exc: - module.fail_json(msg=to_text(exc), exception=traceback.format_exc()) # we don't do any locking as this should be a unique module/process if pid == 0: diff --git a/lib/ansible/module_utils/splitter.py b/lib/ansible/module_utils/splitter.py index 7bddd32dae1..5ae3393fd60 100644 --- a/lib/ansible/module_utils/splitter.py +++ b/lib/ansible/module_utils/splitter.py @@ -30,10 +30,10 @@ from __future__ import annotations def _get_quote_state(token, quote_char): - ''' + """ the goal of this block is to determine if the quoted string is unterminated in which case it needs to be put back together - ''' + """ # the char before the current one, used to see if # the current character is escaped prev_char = None @@ -50,11 +50,11 @@ def _get_quote_state(token, quote_char): def _count_jinja2_blocks(token, cur_depth, open_token, close_token): - ''' + """ this function counts the number of opening/closing blocks for a given opening/closing type and adjusts the current depth for that block based on the difference - ''' + """ num_open = token.count(open_token) num_close = token.count(close_token) if num_open != num_close: @@ -65,7 +65,7 @@ def _count_jinja2_blocks(token, cur_depth, open_token, close_token): def split_args(args): - ''' + """ Splits args on whitespace, but intelligently reassembles those that may have been split over a jinja2 block or quotes. @@ -78,10 +78,10 @@ def split_args(args): Basically this is a variation shlex that has some more intelligence for how Ansible needs to use it. 
- ''' + """ # the list of params parsed out of the arg string - # this is going to be the result value when we are donei + # this is going to be the result value when we are done params = [] # here we encode the args, so we have a uniform charset to @@ -212,7 +212,7 @@ def is_quoted(data): def unquote(data): - ''' removes first and last quotes from a string, if the string starts and ends with the same quotes ''' + """ removes first and last quotes from a string, if the string starts and ends with the same quotes """ if is_quoted(data): return data[1:-1] return data diff --git a/lib/ansible/module_utils/urls.py b/lib/ansible/module_utils/urls.py index c4c8e3ab7df..c90f0b78fd4 100644 --- a/lib/ansible/module_utils/urls.py +++ b/lib/ansible/module_utils/urls.py @@ -12,7 +12,7 @@ # Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) -''' +""" The **urls** utils module offers a replacement for the urllib python library. urllib is the python stdlib way to retrieve files from the Internet but it @@ -25,7 +25,7 @@ to replace urllib with a more secure library. However, all third party libraries require that the library be installed on the managed machine. That is an extra step for users making use of a module. If possible, avoid third party libraries by using this code instead. 
-''' +""" from __future__ import annotations @@ -223,10 +223,10 @@ UnixHTTPSConnection = None if HAS_SSL: @contextmanager def unix_socket_patch_httpconnection_connect(): - '''Monkey patch ``http.client.HTTPConnection.connect`` to be ``UnixHTTPConnection.connect`` + """Monkey patch ``http.client.HTTPConnection.connect`` to be ``UnixHTTPConnection.connect`` so that when calling ``super(UnixHTTPSConnection, self).connect()`` we get the correct behavior of creating self.sock for the unix socket - ''' + """ _connect = http.client.HTTPConnection.connect http.client.HTTPConnection.connect = UnixHTTPConnection.connect yield @@ -270,7 +270,7 @@ if HAS_SSL: class UnixHTTPConnection(http.client.HTTPConnection): - '''Handles http requests to a unix socket file''' + """Handles http requests to a unix socket file""" def __init__(self, unix_socket): self._unix_socket = unix_socket @@ -290,7 +290,7 @@ class UnixHTTPConnection(http.client.HTTPConnection): class UnixHTTPHandler(urllib.request.HTTPHandler): - '''Handler for Unix urls''' + """Handler for Unix urls""" def __init__(self, unix_socket, **kwargs): super().__init__(**kwargs) @@ -301,29 +301,29 @@ class UnixHTTPHandler(urllib.request.HTTPHandler): class ParseResultDottedDict(dict): - ''' + """ A dict that acts similarly to the ParseResult named tuple from urllib - ''' + """ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.__dict__ = self def as_list(self): - ''' + """ Generate a list from this dict, that looks like the ParseResult named tuple - ''' + """ return [self.get(k, None) for k in ('scheme', 'netloc', 'path', 'params', 'query', 'fragment')] def generic_urlparse(parts): - ''' + """ Returns a dictionary of url parts as parsed by urlparse, but accounts for the fact that older versions of that library do not support named attributes (ie. .netloc) This method isn't of much use any longer, but is kept in a minimal state for backwards compat. 
- ''' + """ result = ParseResultDottedDict(parts._asdict()) result.update({ 'username': parts.username, @@ -340,13 +340,16 @@ def extract_pem_certs(data): def get_response_filename(response): - url = response.geturl() - path = urlparse(url)[2] - filename = os.path.basename(path.rstrip('/')) or None - if filename: - filename = unquote(filename) + if filename := response.headers.get_param('filename', header='content-disposition'): + filename = os.path.basename(filename) + else: + url = response.geturl() + path = urlparse(url)[2] + filename = os.path.basename(path.rstrip('/')) or None + if filename: + filename = unquote(filename) - return response.headers.get_param('filename', header='content-disposition') or filename + return filename def parse_content_type(response): @@ -986,11 +989,11 @@ def open_url(url, data=None, headers=None, method=None, use_proxy=True, client_cert=None, client_key=None, cookies=None, use_gssapi=False, unix_socket=None, ca_path=None, unredirected_headers=None, decompress=True, ciphers=None, use_netrc=True): - ''' + """ Sends a request via HTTP(S) or FTP using urllib (Python3) Does not require the module environment - ''' + """ method = method or ('POST' if data else 'GET') return Request().open(method, url, data=data, headers=headers, use_proxy=use_proxy, force=force, last_mod_time=last_mod_time, timeout=timeout, validate_certs=validate_certs, @@ -1114,10 +1117,10 @@ def basic_auth_header(username, password): def url_argument_spec(): - ''' + """ Creates an argument spec that can be used with any module that will be requesting content via urllib/urllib2 - ''' + """ return dict( url=dict(type='str'), force=dict(type='bool', default=False), @@ -1330,7 +1333,7 @@ def _split_multiext(name, min=3, max=4, count=2): def fetch_file(module, url, data=None, headers=None, method=None, use_proxy=True, force=False, last_mod_time=None, timeout=10, unredirected_headers=None, decompress=True, ciphers=None): - '''Download and save a file via HTTP(S) or FTP 
(needs the module as parameter). + """Download and save a file via HTTP(S) or FTP (needs the module as parameter). This is basically a wrapper around fetch_url(). :arg module: The AnsibleModule (used to get username, password etc. (s.b.). @@ -1348,7 +1351,7 @@ def fetch_file(module, url, data=None, headers=None, method=None, :kwarg ciphers: (optional) List of ciphers to use :returns: A string, the path to the downloaded file. - ''' + """ # download file bufsize = 65536 parts = urlparse(url) diff --git a/lib/ansible/modules/add_host.py b/lib/ansible/modules/add_host.py index de3c8619135..80a2d0aef8f 100644 --- a/lib/ansible/modules/add_host.py +++ b/lib/ansible/modules/add_host.py @@ -7,7 +7,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: add_host short_description: Add a host (and alternatively a group) to the ansible-playbook in-memory inventory @@ -61,7 +61,7 @@ notes: - The alias O(host) of the parameter O(name) is only available on Ansible 2.4 and newer. - Since Ansible 2.4, the C(inventory_dir) variable is now set to V(None) instead of the 'global inventory source', because you can now have multiple sources. An example was added that shows how to partially restore the previous behaviour. -- Though this module does not change the remote host, we do provide 'changed' status as it can be useful for those trying to track inventory changes. +- Though this module does not change the remote host, we do provide C(changed) status as it can be useful for those trying to track inventory changes. - The hosts added will not bypass the C(--limit) from the command line, so both of those need to be in agreement to make them available as play targets. They are still available from hostvars and for delegation as a normal part of the inventory. 
seealso: @@ -69,9 +69,9 @@ seealso: author: - Ansible Core Team - Seth Vidal (@skvidal) -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - name: Add host to group 'just_created' with variable foo=42 ansible.builtin.add_host: name: '{{ ip_from_ec2 }}' @@ -111,4 +111,4 @@ EXAMPLES = r''' name: '{{ item }}' groups: done loop: "{{ ansible_play_hosts }}" -''' +""" diff --git a/lib/ansible/modules/apt.py b/lib/ansible/modules/apt.py index 858711e3a8f..266165f22a2 100644 --- a/lib/ansible/modules/apt.py +++ b/lib/ansible/modules/apt.py @@ -9,7 +9,7 @@ from __future__ import annotations -DOCUMENTATION = ''' +DOCUMENTATION = """ --- module: apt short_description: Manages apt-packages @@ -63,21 +63,20 @@ options: default: 'no' default_release: description: - - Corresponds to the C(-t) option for I(apt) and sets pin priorities + - Corresponds to the C(-t) option for I(apt) and sets pin priorities. aliases: [ default-release ] type: str install_recommends: description: - - Corresponds to the C(--no-install-recommends) option for I(apt). V(true) installs recommended packages. V(false) does not install + - Corresponds to the C(--no-install-recommends) option for C(apt). V(true) installs recommended packages. V(false) does not install recommended packages. By default, Ansible will use the same defaults as the operating system. Suggested packages are never installed. aliases: [ install-recommends ] type: bool force: description: - - 'Corresponds to the C(--force-yes) to I(apt-get) and implies O(allow_unauthenticated=yes) and O(allow_downgrade=yes)' - - "This option will disable checking both the packages' signatures and the certificates of the - web servers they are downloaded from." - - 'This option *is not* the equivalent of passing the C(-f) flag to I(apt-get) on the command line' + - 'Corresponds to the C(--force-yes) to C(apt-get) and implies O(allow_unauthenticated=yes) and O(allow_downgrade=yes).' 
+ - "This option will disable checking both the packages' signatures and the certificates of the web servers they are downloaded from." + - 'This option *is not* the equivalent of passing the C(-f) flag to C(apt-get) on the command line.' - '**This is a destructive operation with the potential to destroy your system, and it should almost never be used.** Please also see C(man apt-get) for more information.' type: bool @@ -85,7 +84,7 @@ options: clean: description: - Run the equivalent of C(apt-get clean) to clear out the local repository of retrieved package files. It removes everything but - the lock file from /var/cache/apt/archives/ and /var/cache/apt/archives/partial/. + the lock file from C(/var/cache/apt/archives/) and C(/var/cache/apt/archives/partial/). - Can be run as part of the package installation (clean runs before install) or as a separate step. type: bool default: 'no' @@ -93,7 +92,7 @@ options: allow_unauthenticated: description: - Ignore if packages cannot be authenticated. This is useful for bootstrapping environments that manage their own apt-key setup. - - 'O(allow_unauthenticated) is only supported with O(state): V(install)/V(present)' + - 'O(allow_unauthenticated) is only supported with O(state): V(install)/V(present).' aliases: [ allow-unauthenticated ] type: bool default: 'no' @@ -111,7 +110,7 @@ options: version_added: "2.12" allow_change_held_packages: description: - - Allows changing the version of a package which is on the apt hold list + - Allows changing the version of a package which is on the apt hold list. type: bool default: 'no' version_added: '2.13' @@ -128,14 +127,14 @@ options: type: str dpkg_options: description: - - Add dpkg options to apt command. Defaults to '-o "Dpkg::Options::=--force-confdef" -o "Dpkg::Options::=--force-confold"' - - Options should be supplied as comma separated list + - Add C(dpkg) options to C(apt) command. Defaults to C(-o "Dpkg::Options::=--force-confdef" -o "Dpkg::Options::=--force-confold"). 
+ - Options should be supplied as comma separated list. default: force-confdef,force-confold type: str deb: description: - Path to a .deb package on the remote machine. - - If :// in the path, ansible will attempt to download deb before installing. (Version added 2.1) + - If C(://) in the path, ansible will attempt to download deb before installing. (Version added 2.1) - Requires the C(xz-utils) package to extract the control file of the deb package to install. type: path required: false @@ -143,7 +142,8 @@ options: autoremove: description: - If V(true), remove unused dependency packages for all module states except V(build-dep). It can also be used as the only option. - - Previous to version 2.4, autoclean was also an alias for autoremove, now it is its own separate command. See documentation for further information. + - Previous to version 2.4, O(autoclean) was also an alias for O(autoremove), now it is its own separate command. + See documentation for further information. type: bool default: 'no' version_added: "2.1" @@ -155,10 +155,10 @@ options: version_added: "2.4" policy_rc_d: description: - - Force the exit code of /usr/sbin/policy-rc.d. - - For example, if I(policy_rc_d=101) the installed package will not trigger a service start. - - If /usr/sbin/policy-rc.d already exists, it is backed up and restored after the package installation. - - If V(null), the /usr/sbin/policy-rc.d isn't created/changed. + - Force the exit code of C(/usr/sbin/policy-rc.d). + - For example, if O(policy_rc_d=101) the installed package will not trigger a service start. + - If C(/usr/sbin/policy-rc.d) already exists, it is backed up and restored after the package installation. + - If V(null), the C(/usr/sbin/policy-rc.d) is not created/changed. type: int default: null version_added: "2.8" @@ -179,7 +179,7 @@ options: version_added: "2.11" force_apt_get: description: - - Force usage of apt-get instead of aptitude + - Force usage of apt-get instead of aptitude. 
type: bool default: 'no' version_added: "2.4" @@ -205,22 +205,22 @@ attributes: platforms: debian notes: - Three of the upgrade modes (V(full), V(safe) and its alias V(true)) required C(aptitude) up to 2.3, since 2.4 C(apt-get) is used as a fall-back. - - In most cases, packages installed with apt will start newly installed services by default. Most distributions have mechanisms to avoid this. + - In most cases, packages installed with I(apt) will start newly installed services by default. Most distributions have mechanisms to avoid this. For example when installing Postgresql-9.5 in Debian 9, creating an executable shell script (/usr/sbin/policy-rc.d) that throws - a return code of 101 will stop Postgresql 9.5 starting up after install. Remove the file or its execute permission afterward. - - The apt-get commandline supports implicit regex matches here but we do not because it can let typos through easier + a return code of 101 will stop Postgresql 9.5 starting up after install. Remove the file or its execute permission afterward. + - The C(apt-get) commandline supports implicit regex matches here but we do not because it can let typos through easier (If you typo C(foo) as C(fo) apt-get would install packages that have "fo" in their name with a warning and a prompt for the user. - Since we don't have warnings and prompts before installing, we disallow this.Use an explicit fnmatch pattern if you want wildcarding) + Since there are no warnings and prompts before installing, we disallow this. Use an explicit fnmatch pattern if you want wildcarding). - When used with a C(loop:) each package will be processed individually, it is much more efficient to pass the list directly to the O(name) option. - When O(default_release) is used, an implicit priority of 990 is used. This is the same behavior as C(apt-get -t). - When an exact version is specified, an implicit priority of 1001 is used. 
- - If the interpreter can't import ``python-apt``/``python3-apt`` the module will check for it in system-owned interpreters as well. + - If the interpreter can't import C(python-apt)/C(python3-apt) the module will check for it in system-owned interpreters as well. If the dependency can't be found, the module will attempt to install it. If the dependency is found or installed, the module will be respawned under the correct interpreter. -''' +""" -EXAMPLES = ''' -- name: Install apache httpd (state=present is optional) +EXAMPLES = """ +- name: Install apache httpd (state=present is optional) ansible.builtin.apt: name: apache2 state: present @@ -327,9 +327,9 @@ EXAMPLES = ''' - name: Run the equivalent of "apt-get clean" as a separate step ansible.builtin.apt: clean: yes -''' +""" -RETURN = ''' +RETURN = """ cache_updated: description: if the cache was updated or not returned: success, in some cases @@ -355,7 +355,7 @@ stderr: returned: success, when needed type: str sample: "AH00558: apache2: Could not reliably determine the server's fully qualified domain name, using 127.0.1.1. Set the 'ServerName' directive globally to ..." 
-''' # NOQA +""" # NOQA # added to stave off future warnings about apt api import warnings @@ -365,8 +365,8 @@ import datetime import fnmatch import locale as locale_module import os -import random import re +import secrets import shutil import sys import tempfile @@ -381,8 +381,8 @@ from ansible.module_utils.six import string_types from ansible.module_utils.urls import fetch_file DPKG_OPTIONS = 'force-confdef,force-confold' -APT_GET_ZERO = "\n0 upgraded, 0 newly installed" -APTITUDE_ZERO = "\n0 packages upgraded, 0 newly installed" +APT_GET_ZERO = "\n0 upgraded, 0 newly installed, 0 to remove" +APTITUDE_ZERO = "\n0 packages upgraded, 0 newly installed, 0 to remove" APT_LISTS_PATH = "/var/lib/apt/lists" APT_UPDATE_SUCCESS_STAMP_PATH = "/var/lib/apt/periodic/update-success-stamp" APT_MARK_INVALID_OP = 'Invalid operation' @@ -507,7 +507,7 @@ def package_best_match(pkgname, version_cmp, version, release, cache): policy.create_pin('Release', pkgname, release, 990) if version_cmp == "=": # Installing a specific version from command line overrides all pinning - # We don't mimmic this exactly, but instead set a priority which is higher than all APT built-in pin priorities. + # We don't mimic this exactly, but instead set a priority which is higher than all APT built-in pin priorities. 
policy.create_pin('Version', pkgname, version, 1001) pkg = cache[pkgname] pkgver = policy.get_candidate_ver(pkg) @@ -1184,7 +1184,7 @@ def get_updated_cache_time(): # https://github.com/ansible/ansible-modules-core/issues/2951 def get_cache(module): - '''Attempt to get the cache object and update till it works''' + """Attempt to get the cache object and update till it works""" cache = None try: cache = apt.Cache() @@ -1252,6 +1252,7 @@ def main(): LC_ALL=locale, LC_MESSAGES=locale, LC_CTYPE=locale, + LANGUAGE=locale, ) module.run_command_environ_update = APT_ENV_VARS @@ -1387,7 +1388,7 @@ def main(): err = '' update_cache_retries = module.params.get('update_cache_retries') update_cache_retry_max_delay = module.params.get('update_cache_retry_max_delay') - randomize = random.randint(0, 1000) / 1000.0 + randomize = secrets.randbelow(1000) / 1000.0 for retry in range(update_cache_retries): try: diff --git a/lib/ansible/modules/apt_key.py b/lib/ansible/modules/apt_key.py index 669bad20c6f..3828f9a882b 100644 --- a/lib/ansible/modules/apt_key.py +++ b/lib/ansible/modules/apt_key.py @@ -8,7 +8,7 @@ from __future__ import annotations -DOCUMENTATION = ''' +DOCUMENTATION = """ --- module: apt_key author: @@ -26,13 +26,13 @@ attributes: platform: platforms: debian notes: - - The apt-key command used by this module has been deprecated. See the L(Debian wiki,https://wiki.debian.org/DebianRepository/UseThirdParty) for details. - This module is kept for backwards compatibility for systems that still use apt-key as the main way to manage apt repository keys. + - The C(apt-key) command used by this module has been deprecated. See the L(Debian wiki,https://wiki.debian.org/DebianRepository/UseThirdParty) for details. + This module is kept for backwards compatibility for systems that still use C(apt-key) as the main way to manage apt repository keys. - As a sanity check, downloaded key id must match the one specified. 
- "Use full fingerprint (40 characters) key ids to avoid key collisions. To generate a full-fingerprint imported key: C(apt-key adv --list-public-keys --with-fingerprint --with-colons)." - - If you specify both the key id and the URL with O(state=present), the task can verify or add the key as needed. - - Adding a new key requires an apt cache update (e.g. using the M(ansible.builtin.apt) module's update_cache option). + - If you specify both the key O(id) and the O(url) with O(state=present), the task can verify or add the key as needed. + - Adding a new key requires an apt cache update (e.g. using the M(ansible.builtin.apt) module's C(update_cache) option). requirements: - gpg seealso: @@ -42,7 +42,7 @@ options: description: - The identifier of the key. - Including this allows check mode to correctly report the changed state. - - If specifying a subkey's id be aware that apt-key does not understand how to remove keys via a subkey id. Specify the primary key's id instead. + - If specifying a subkey's id be aware that apt-key does not understand how to remove keys via a subkey id. Specify the primary key's id instead. - This parameter is required when O(state) is set to V(absent). type: str data: @@ -79,9 +79,9 @@ options: on personally controlled sites using self-signed certificates. 
type: bool default: 'yes' -''' +""" -EXAMPLES = ''' +EXAMPLES = """ - name: One way to avoid apt_key once it is removed from your distro, armored keys should use .asc extension, binary should use .gpg block: - name: somerepo | no apt key @@ -133,9 +133,9 @@ EXAMPLES = ''' id: 9FED2BCBDCD29CDF762678CBAED4B06F473041FA file: /tmp/apt.gpg state: present -''' +""" -RETURN = ''' +RETURN = """ after: description: List of apt key ids or fingerprints after any modification returned: on change @@ -166,7 +166,7 @@ short_id: returned: always type: str sample: "A88D21E9" -''' +""" import os @@ -188,7 +188,7 @@ def lang_env(module): if not hasattr(lang_env, 'result'): locale = get_best_parsable_locale(module) - lang_env.result = dict(LANG=locale, LC_ALL=locale, LC_MESSAGES=locale) + lang_env.result = dict(LANG=locale, LC_ALL=locale, LC_MESSAGES=locale, LANGUAGE=locale) return lang_env.result diff --git a/lib/ansible/modules/apt_repository.py b/lib/ansible/modules/apt_repository.py index aa50c54c17f..b17801f5f89 100644 --- a/lib/ansible/modules/apt_repository.py +++ b/lib/ansible/modules/apt_repository.py @@ -9,7 +9,7 @@ from __future__ import annotations -DOCUMENTATION = ''' +DOCUMENTATION = """ --- module: apt_repository short_description: Add and remove APT repositories @@ -41,13 +41,13 @@ options: default: "present" mode: description: - - The octal mode for newly created files in sources.list.d. + - The octal mode for newly created files in C(sources.list.d). - Default is what system uses (probably 0644). type: raw version_added: "1.6" update_cache: description: - - Run the equivalent of C(apt-get update) when a change occurs. Cache updates are run after making changes. + - Run the equivalent of C(apt-get update) when a change occurs. Cache updates are run after making changes. type: bool default: "yes" aliases: [ update-cache ] @@ -72,9 +72,9 @@ options: version_added: '1.8' filename: description: - - Sets the name of the source list file in sources.list.d. 
+ - Sets the name of the source list file in C(sources.list.d). Defaults to a file name based on the repository source url. - The .list extension will be automatically added. + The C(.list) extension will be automatically added. type: str version_added: '2.1' codename: @@ -90,8 +90,8 @@ options: Without this library, the module does not work. - Runs C(apt-get install python-apt) for Python 2, and C(apt-get install python3-apt) for Python 3. - Only works with the system Python 2 or Python 3. If you are using a Python on the remote that is not - the system Python, set O(install_python_apt=false) and ensure that the Python apt library - for your Python version is installed some other way. + the system Python, set O(install_python_apt=false) and ensure that the Python apt library + for your Python version is installed some other way. type: bool default: true author: @@ -101,9 +101,9 @@ requirements: - python-apt (python 2) - python3-apt (python 3) - apt-key or gpg -''' +""" -EXAMPLES = ''' +EXAMPLES = """ - name: Add specified repository into sources list ansible.builtin.apt_repository: repo: deb http://archive.canonical.com/ubuntu hardy partner @@ -145,9 +145,9 @@ EXAMPLES = ''' ansible.builtin.apt_repository: repo: "deb [arch=amd64 signed-by=/etc/apt/keyrings/myrepo.asc] https://download.example.com/linux/ubuntu {{ ansible_distribution_release }} stable" state: present -''' +""" -RETURN = ''' +RETURN = """ repo: description: A source string for the repository returned: always @@ -167,16 +167,16 @@ sources_removed: type: list sample: ["/etc/apt/sources.list.d/artifacts_elastic_co_packages_6_x_apt.list"] version_added: "2.15" -''' +""" import copy import glob import json import os import re +import secrets import sys import tempfile -import random import time from ansible.module_utils.basic import AnsibleModule @@ -245,7 +245,7 @@ class SourcesList(object): self.load(file) def __iter__(self): - '''Simple iterator to go over all sources. 
Empty, non-source, and other not valid lines will be skipped.''' + """Simple iterator to go over all sources. Empty, non-source, and other not valid lines will be skipped.""" for file, sources in self.files.items(): for n, valid, enabled, source, comment in sources: if valid: @@ -315,9 +315,9 @@ class SourcesList(object): @staticmethod def _apt_cfg_file(filespec): - ''' + """ Wrapper for `apt_pkg` module for running with Python 2.5 - ''' + """ try: result = apt_pkg.config.find_file(filespec) except AttributeError: @@ -326,9 +326,9 @@ class SourcesList(object): @staticmethod def _apt_cfg_dir(dirspec): - ''' + """ Wrapper for `apt_pkg` module for running with Python 2.5 - ''' + """ try: result = apt_pkg.config.find_dir(dirspec) except AttributeError: @@ -413,10 +413,10 @@ class SourcesList(object): return new def modify(self, file, n, enabled=None, source=None, comment=None): - ''' + """ This function to be used with iterator, so we don't care of invalid sources. If source, enabled, or comment is None, original value from line ``n`` will be preserved. 
- ''' + """ valid, enabled_old, source_old, comment_old = self.files[file][n][1:] self.files[file][n] = (n, valid, self._choice(enabled, enabled_old), self._choice(source, source_old), self._choice(comment, comment_old)) @@ -504,7 +504,7 @@ class UbuntuSourcesList(SourcesList): if self.apt_key_bin: locale = get_best_parsable_locale(self.module) - APT_ENV = dict(LANG=locale, LC_ALL=locale, LC_MESSAGES=locale, LC_CTYPE=locale) + APT_ENV = dict(LANG=locale, LC_ALL=locale, LC_MESSAGES=locale, LC_CTYPE=locale, LANGUAGE=locale) self.module.run_command_environ_update = APT_ENV rc, out, err = self.module.run_command([self.apt_key_bin, 'export', key_fingerprint], check_rc=True) found = bool(not err or 'nothing exported' not in err) @@ -616,7 +616,7 @@ class UbuntuSourcesList(SourcesList): def revert_sources_list(sources_before, sources_after, sourceslist_before): - '''Revert the sourcelist files to their previous state.''' + """Revert the sourcelist files to their previous state.""" # First remove any new files that were created: for filename in set(sources_after.keys()).difference(sources_before.keys()): @@ -743,7 +743,7 @@ def main(): if update_cache: update_cache_retries = module.params.get('update_cache_retries') update_cache_retry_max_delay = module.params.get('update_cache_retry_max_delay') - randomize = random.randint(0, 1000) / 1000.0 + randomize = secrets.randbelow(1000) / 1000.0 cache = apt.Cache() for retry in range(update_cache_retries): diff --git a/lib/ansible/modules/assemble.py b/lib/ansible/modules/assemble.py index bd8ddf6cfff..ff570aee1b9 100644 --- a/lib/ansible/modules/assemble.py +++ b/lib/ansible/modules/assemble.py @@ -8,7 +8,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: assemble short_description: Assemble configuration files from fragments @@ -102,9 +102,9 @@ extends_documentation_fragment: - action_common_attributes.files - decrypt - files -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - name: 
Assemble from fragments from a directory ansible.builtin.assemble: src: /etc/someapp/fragments @@ -121,9 +121,9 @@ EXAMPLES = r''' src: /etc/ssh/conf.d/ dest: /etc/ssh/sshd_config validate: /usr/sbin/sshd -t -f %s -''' +""" -RETURN = r'''#''' +RETURN = r"""#""" import codecs import os @@ -136,7 +136,7 @@ from ansible.module_utils.common.text.converters import to_native def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, ignore_hidden=False, tmpdir=None): - ''' assemble a file from a directory of fragments ''' + """ assemble a file from a directory of fragments """ tmpfd, temp_path = tempfile.mkstemp(dir=tmpdir) tmp = os.fdopen(tmpfd, 'wb') delimit_me = False diff --git a/lib/ansible/modules/assert.py b/lib/ansible/modules/assert.py index af758a53c51..90eeacb305f 100644 --- a/lib/ansible/modules/assert.py +++ b/lib/ansible/modules/assert.py @@ -6,7 +6,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: assert short_description: Asserts given expressions are true @@ -70,9 +70,9 @@ seealso: author: - Ansible Core Team - Michael DeHaan -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - name: A single condition can be supplied as string instead of list ansible.builtin.assert: that: "ansible_os_family != 'RedHat'" @@ -106,4 +106,4 @@ EXAMPLES = r''' - my_param <= 100 - my_param >= 0 quiet: true -''' +""" diff --git a/lib/ansible/modules/async_status.py b/lib/ansible/modules/async_status.py index e07143adb55..0a4eeb53ac2 100644 --- a/lib/ansible/modules/async_status.py +++ b/lib/ansible/modules/async_status.py @@ -6,7 +6,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: async_status short_description: Obtain status of asynchronous task @@ -51,9 +51,9 @@ seealso: author: - Ansible Core Team - Michael DeHaan -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" --- - name: Asynchronous dnf task ansible.builtin.dnf: @@ -75,9 +75,9 @@ EXAMPLES = r''' 
ansible.builtin.async_status: jid: '{{ dnf_sleeper.ansible_job_id }}' mode: cleanup -''' +""" -RETURN = r''' +RETURN = r""" ansible_job_id: description: The asynchronous job id returned: success @@ -105,7 +105,7 @@ erased: description: Path to erased job file returned: when file is erased type: str -''' +""" import json import os diff --git a/lib/ansible/modules/async_wrapper.py b/lib/ansible/modules/async_wrapper.py index cd87f1f4f2f..d33ebe196ed 100644 --- a/lib/ansible/modules/async_wrapper.py +++ b/lib/ansible/modules/async_wrapper.py @@ -75,13 +75,13 @@ def daemonize_self(): # NB: this function copied from module_utils/json_utils.py. Ensure any changes are propagated there. # FUTURE: AnsibleModule-ify this module so it's Ansiballz-compatible and can use the module_utils copy of this function. def _filter_non_json_lines(data): - ''' + """ Used to filter unrelated output around module JSON output, like messages from tcagetattr, or where dropbear spews MOTD on every single command (which is nuts). Filters leading lines before first line-starting occurrence of '{', and filter all trailing lines after matching close character (working from the bottom of output). - ''' + """ warnings = [] # Filter initial junk diff --git a/lib/ansible/modules/blockinfile.py b/lib/ansible/modules/blockinfile.py index 80f9f3ef3f3..e5240a0cc4f 100644 --- a/lib/ansible/modules/blockinfile.py +++ b/lib/ansible/modules/blockinfile.py @@ -7,7 +7,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: blockinfile short_description: Insert/update/remove a text block surrounded by marker lines @@ -50,12 +50,10 @@ options: description: - If specified and no begin/ending O(marker) lines are found, the block will be inserted after the last match of specified regular expression. - A special value is available; V(EOF) for inserting the block at the end of the file. - - If specified regular expression has no matches, V(EOF) will be used instead. 
+ - If specified regular expression has no matches or no value is passed, V(EOF) will be used instead. - The presence of the multiline flag (?m) in the regular expression controls whether the match is done line by line or with multiple lines. This behaviour was added in ansible-core 2.14. type: str - choices: [ EOF, '*regex*' ] - default: EOF insertbefore: description: - If specified and no begin/ending O(marker) lines are found, the block will be inserted before the last match of specified regular expression. @@ -64,7 +62,6 @@ options: - The presence of the multiline flag (?m) in the regular expression controls whether the match is done line by line or with multiple lines. This behaviour was added in ansible-core 2.14. type: str - choices: [ BOF, '*regex*' ] create: description: - Create a new file if it does not exist. @@ -128,9 +125,9 @@ attributes: platforms: posix vault: support: none -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" # Before Ansible 2.3, option 'dest' or 'name' was used instead of 'path' - name: Insert/Update "Match User" configuration block in /etc/ssh/sshd_config prepending and appending a new line ansible.builtin.blockinfile: @@ -190,7 +187,7 @@ EXAMPLES = r''' insertafter: '(?m)SID_LIST_LISTENER_DG =\n.*\(SID_LIST =' marker: " " -''' +""" import re import os @@ -203,9 +200,8 @@ from ansible.module_utils.common.text.converters import to_bytes, to_native def write_changes(module, contents, path): tmpfd, tmpfile = tempfile.mkstemp(dir=module.tmpdir) - f = os.fdopen(tmpfd, 'wb') - f.write(contents) - f.close() + with os.fdopen(tmpfd, 'wb') as tf: + tf.write(contents) validate = module.params.get('validate', None) valid = not validate diff --git a/lib/ansible/modules/command.py b/lib/ansible/modules/command.py index 42d9beeff4b..ed71342ab6b 100644 --- a/lib/ansible/modules/command.py +++ b/lib/ansible/modules/command.py @@ -7,7 +7,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: command 
short_description: Execute commands on targets @@ -15,12 +15,11 @@ version_added: historical description: - The M(ansible.builtin.command) module takes the command name followed by a list of space-delimited arguments. - The given command will be executed on all selected nodes. - - The command(s) will not be - processed through the shell, so variables like C($HOSTNAME) and operations - like C("*"), C("<"), C(">"), C("|"), C(";") and C("&") will not work. + - The command(s) will not be processed through the shell, so operations like C("*"), C("<"), C(">"), C("|"), C(";") and C("&") will not work. + Also, environment variables are resolved via Python, not shell, see O(expand_argument_vars) and are left unchanged if not matched. Use the M(ansible.builtin.shell) module if you need these features. - - To create C(command) tasks that are easier to read than the ones using space-delimited - arguments, pass parameters using the C(args) L(task keyword,https://docs.ansible.com/ansible/latest/reference_appendices/playbooks_keywords.html#task) + - To create C(command) tasks that are easier to read than the ones using space-delimited arguments, + pass parameters using the C(args) L(task keyword,https://docs.ansible.com/ansible/latest/reference_appendices/playbooks_keywords.html#task) or use O(cmd) parameter. - Either a free form command or O(cmd) parameter is required, see the examples. - For Windows targets, use the M(ansible.windows.win_command) module instead. @@ -41,8 +40,8 @@ attributes: options: expand_argument_vars: description: - - Expands the arguments that are variables, for example C($HOME) will be expanded before being passed to the - command to run. + - Expands the arguments that are variables, for example C($HOME) will be expanded before being passed to the command to run. + - If a variable is not matched, it is left unchanged, unlike shell substitution which would remove it. - Set to V(false) to disable expansion and treat the value as a literal argument. 
type: bool default: true @@ -118,9 +117,9 @@ seealso: author: - Ansible Core Team - Michael DeHaan -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - name: Return motd to registered var ansible.builtin.command: cat /etc/motd register: mymotd @@ -174,9 +173,9 @@ EXAMPLES = r''' - name: Safely use templated variable to run command. Always use the quote filter to avoid injection issues ansible.builtin.command: cat {{ myfile|quote }} register: myoutput -''' +""" -RETURN = r''' +RETURN = r""" msg: description: changed returned: always @@ -229,7 +228,7 @@ stderr_lines: returned: always type: list sample: [u'ls cannot access foo: No such file or directory', u'ls …'] -''' +""" import datetime import glob diff --git a/lib/ansible/modules/copy.py b/lib/ansible/modules/copy.py index 0a1dc7f7717..8a5297466f4 100644 --- a/lib/ansible/modules/copy.py +++ b/lib/ansible/modules/copy.py @@ -7,7 +7,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: copy version_added: historical @@ -109,7 +109,6 @@ options: description: - This flag indicates that filesystem links in the source tree, if they exist, should be followed. type: bool - default: yes version_added: '2.4' checksum: description: @@ -155,9 +154,9 @@ attributes: vault: support: full version_added: '2.2' -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - name: Copy file with owner and permissions ansible.builtin.copy: src: /srv/myfiles/foo.conf @@ -220,9 +219,9 @@ EXAMPLES = r''' src: /etc/foo.conf dest: /path/to/link # link to /path/to/file follow: no -''' +""" -RETURN = r''' +RETURN = r""" dest: description: Destination file/path. returned: success @@ -283,7 +282,7 @@ state: returned: success type: str sample: file -''' +""" import errno import filecmp @@ -306,9 +305,9 @@ class AnsibleModuleError(Exception): def split_pre_existing_dir(dirname): - ''' + """ Return the first pre-existing directory and a list of the new directories that will be created. 
- ''' + """ head, tail = os.path.split(dirname) b_head = to_bytes(head, errors='surrogate_or_strict') if head == '': @@ -324,9 +323,9 @@ def split_pre_existing_dir(dirname): def adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed): - ''' + """ Walk the new directories list and make sure that permissions are as we would expect - ''' + """ if new_directory_list: working_dir = os.path.join(pre_existing_dir, new_directory_list.pop(0)) @@ -516,7 +515,7 @@ def main(): force=dict(type='bool', default=True), validate=dict(type='str'), directory_mode=dict(type='raw'), - remote_src=dict(type='bool'), + remote_src=dict(type='bool', default=False), local_follow=dict(type='bool'), checksum=dict(type='str'), follow=dict(type='bool', default=False), diff --git a/lib/ansible/modules/cron.py b/lib/ansible/modules/cron.py index 173c4fad446..0382aa6b265 100644 --- a/lib/ansible/modules/cron.py +++ b/lib/ansible/modules/cron.py @@ -10,7 +10,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: cron short_description: Manage cron.d and crontab entries @@ -131,6 +131,9 @@ options: version_added: "2.1" requirements: - cron (any 'vixie cron' conformant variant, like cronie) +notes: + - If you are experiencing permissions issues with cron and MacOS, + you should see the official MacOS documentation for further information. author: - Dane Summers (@dsummersl) - Mike Grozak (@rhaido) @@ -147,9 +150,9 @@ attributes: platform: support: full platforms: posix -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - name: Ensure a job that runs at 2 and 5 exists. 
Creates an entry like "0 5,2 * * ls -alh > /dev/null" ansible.builtin.cron: name: "check dirs" @@ -202,9 +205,9 @@ EXAMPLES = r''' name: APP_HOME env: yes state: absent -''' +""" -RETURN = r'''#''' +RETURN = r"""#""" import os import platform @@ -259,10 +262,9 @@ class CronTab(object): if self.cron_file: # read the cronfile try: - f = open(self.b_cron_file, 'rb') - self.n_existing = to_native(f.read(), errors='surrogate_or_strict') - self.lines = self.n_existing.splitlines() - f.close() + with open(self.b_cron_file, 'rb') as f: + self.n_existing = to_native(f.read(), errors='surrogate_or_strict') + self.lines = self.n_existing.splitlines() except IOError: # cron file does not exist return @@ -325,7 +327,7 @@ class CronTab(object): os.unlink(path) if rc != 0: - self.module.fail_json(msg=err) + self.module.fail_json(msg=f"Failed to install new cronfile: {path}", stderr=err, stdout=out, rc=rc) # set SELinux permissions if self.module.selinux_enabled() and self.cron_file: diff --git a/lib/ansible/modules/deb822_repository.py b/lib/ansible/modules/deb822_repository.py index 0fa33c73d70..a27af10786c 100644 --- a/lib/ansible/modules/deb822_repository.py +++ b/lib/ansible/modules/deb822_repository.py @@ -4,7 +4,7 @@ from __future__ import annotations -DOCUMENTATION = ''' +DOCUMENTATION = """ author: 'Ansible Core Team (@ansible)' short_description: 'Add and remove deb822 formatted repositories' description: @@ -145,9 +145,9 @@ options: requirements: - python3-debian / python-debian version_added: '2.15' -''' +""" -EXAMPLES = ''' +EXAMPLES = """ - name: Add debian repo deb822_repository: name: debian @@ -189,9 +189,9 @@ EXAMPLES = ''' components: stable architectures: amd64 signed_by: https://download.example.com/linux/ubuntu/gpg -''' +""" -RETURN = ''' +RETURN = """ repo: description: A source string for the repository returned: always @@ -224,7 +224,7 @@ key_filename: returned: always type: str sample: /etc/apt/keyrings/debian.gpg -''' +""" import os import re diff --git 
a/lib/ansible/modules/debconf.py b/lib/ansible/modules/debconf.py index 0ffaf0e79bb..701c19dabb6 100644 --- a/lib/ansible/modules/debconf.py +++ b/lib/ansible/modules/debconf.py @@ -6,7 +6,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: debconf short_description: Configure a .deb package @@ -86,9 +86,9 @@ options: default: false author: - Brian Coca (@bcoca) -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - name: Set default locale to fr_FR.UTF-8 ansible.builtin.debconf: name: locales @@ -121,9 +121,9 @@ EXAMPLES = r''' value: "{{ site_passphrase }}" vtype: password no_log: True -''' +""" -RETURN = r'''#''' +RETURN = r"""#""" from ansible.module_utils.common.text.converters import to_text, to_native from ansible.module_utils.basic import AnsibleModule @@ -134,21 +134,24 @@ def get_password_value(module, pkg, question, vtype): cmd = [getsel] rc, out, err = module.run_command(cmd) if rc != 0: - module.fail_json(msg="Failed to get the value '%s' from '%s'" % (question, pkg)) + module.fail_json(msg=f"Failed to get the value '{question}' from '{pkg}': {err}") - desired_line = None for line in out.split("\n"): - if line.startswith(pkg): - desired_line = line - break - - if not desired_line: - module.fail_json(msg="Failed to find the value '%s' from '%s'" % (question, pkg)) - - (dpkg, dquestion, dvtype, dvalue) = desired_line.split() - if dquestion == question and dvtype == vtype: - return dvalue - return '' + if not line.startswith(pkg): + continue + + # line is a collection of tab separated values + fields = line.split('\t') + if len(fields) <= 3: + # No password found, return a blank password + return '' + try: + if fields[1] == question and fields[2] == vtype: + # If correct question and question type found, return password value + return fields[3] + except IndexError: + # Fail safe + return '' def get_selections(module, pkg): @@ -173,8 +176,6 @@ def set_selection(module, pkg, question, vtype, value, unseen): if unseen: 
cmd.append('-u') - if vtype == 'boolean': - value = value.lower() data = ' '.join([pkg, question, vtype, value]) return module.run_command(cmd, data=data) @@ -209,15 +210,17 @@ def main(): if vtype is None or value is None: module.fail_json(msg="when supplying a question you must supply a valid vtype and value") + # ensure we compare booleans supplied to the way debconf sees them (true/false strings) + if vtype == 'boolean': + value = to_text(value).lower() + # if question doesn't exist, value cannot match if question not in prev: changed = True else: existing = prev[question] - # ensure we compare booleans supplied to the way debconf sees them (true/false strings) if vtype == 'boolean': - value = to_text(value).lower() existing = to_text(prev[question]).lower() elif vtype == 'password': existing = get_password_value(module, pkg, question, vtype) diff --git a/lib/ansible/modules/debug.py b/lib/ansible/modules/debug.py index 325d2541c2c..c90b1eea806 100644 --- a/lib/ansible/modules/debug.py +++ b/lib/ansible/modules/debug.py @@ -6,7 +6,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: debug short_description: Print statements during execution @@ -68,9 +68,9 @@ seealso: author: - Dag Wieers (@dagwieers) - Michael DeHaan -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - name: Print the gateway for each host when defined ansible.builtin.debug: msg: System {{ inventory_hostname }} has gateway {{ ansible_default_ipv4.gateway }} @@ -95,4 +95,4 @@ EXAMPLES = r''' msg: - "Provisioning based on YOUR_KEY which is: {{ lookup('ansible.builtin.env', 'YOUR_KEY') }}" - "These servers were built using the password of '{{ password_used }}'. Please retain this for later use." 
-''' +""" diff --git a/lib/ansible/modules/dnf.py b/lib/ansible/modules/dnf.py index ae53dd9dfcf..7ab874a941f 100644 --- a/lib/ansible/modules/dnf.py +++ b/lib/ansible/modules/dnf.py @@ -9,7 +9,7 @@ from __future__ import annotations -DOCUMENTATION = ''' +DOCUMENTATION = """ --- module: dnf version_added: 1.9 @@ -19,9 +19,15 @@ description: options: use_backend: description: - - By default, this module will select the backend based on the C(ansible_pkg_mgr) fact. + - Backend module to use. default: "auto" - choices: [ auto, yum, yum4, dnf4, dnf5 ] + choices: + auto: Automatically select the backend based on the C(ansible_facts.pkg_mgr) fact. + yum: Alias for V(auto) (see Notes) + dnf: M(ansible.builtin.dnf) + yum4: Alias for V(dnf) + dnf4: Alias for V(dnf) + dnf5: M(ansible.builtin.dnf5) type: str version_added: 2.15 name: @@ -288,17 +294,21 @@ notes: upstream dnf's API doesn't properly mark groups as installed, therefore upon removal the module is unable to detect that the group is installed U(https://bugzilla.redhat.com/show_bug.cgi?id=1620324). + - While O(use_backend=yum) and the ability to call the action plugin as + M(ansible.builtin.yum) are provided for syntax compatibility, the YUM + backend was removed in ansible-core 2.17 because the required libraries are + not available for any supported version of Python. If you rely on this + functionality, use an older version of Ansible. 
requirements: - python3-dnf - - for the autoremove option you need dnf >= 2.0.1" author: - Igor Gnatenko (@ignatenkobrain) - Cristian van Ee (@DJMuggs) - Berend De Schouwer (@berenddeschouwer) - Adam Miller (@maxamillion) -''' +""" -EXAMPLES = ''' +EXAMPLES = """ - name: Install the latest version of Apache ansible.builtin.dnf: name: httpd @@ -384,14 +394,13 @@ EXAMPLES = ''' ansible.builtin.dnf: name: '@postgresql/client' state: present -''' +""" import os import sys from ansible.module_utils.common.text.converters import to_native, to_text from ansible.module_utils.urls import fetch_file -from ansible.module_utils.compat.version import LooseVersion from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.common.locale import get_best_parsable_locale @@ -417,11 +426,7 @@ class DnfModule(YumDnf): self._ensure_dnf() self.pkg_mgr_name = "dnf" - - try: - self.with_modules = dnf.base.WITH_MODULES - except AttributeError: - self.with_modules = False + self.with_modules = dnf.base.WITH_MODULES def _sanitize_dnf_error_msg_install(self, spec, error): """ @@ -436,22 +441,6 @@ class DnfModule(YumDnf): return error - def _sanitize_dnf_error_msg_remove(self, spec, error): - """ - For unhandled dnf.exceptions.Error scenarios, there are certain error - messages we want to ignore in a removal scenario as known benign - failures. Do that here. 
- """ - if ( - 'no package matched' in to_native(error) or - 'No match for argument:' in to_native(error) - ): - return (False, "{0} is not installed".format(spec)) - - # Return value is tuple of: - # ("Is this actually a failure?", "Error Message") - return (True, error) - def _package_dict(self, package): """Return a dictionary of information for the package.""" # NOTE: This no longer contains the 'dnfstate' field because it is @@ -646,22 +635,14 @@ class DnfModule(YumDnf): """Return a fully configured dnf Base object.""" base = dnf.Base() self._configure_base(base, conf_file, disable_gpg_check, installroot, sslverify) - try: - # this method has been supported in dnf-4.2.17-6 or later - # https://bugzilla.redhat.com/show_bug.cgi?id=1788212 - base.setup_loggers() - except AttributeError: - pass - try: - base.init_plugins(set(self.disable_plugin), set(self.enable_plugin)) - base.pre_configure_plugins() - except AttributeError: - pass # older versions of dnf didn't require this and don't have these methods + + base.setup_loggers() + base.init_plugins(set(self.disable_plugin), set(self.enable_plugin)) + base.pre_configure_plugins() + self._specify_repositories(base, disablerepo, enablerepo) - try: - base.configure_plugins() - except AttributeError: - pass # older versions of dnf didn't require this and don't have these methods + + base.configure_plugins() try: if self.update_cache: @@ -727,22 +708,34 @@ class DnfModule(YumDnf): self.module.exit_json(msg="", results=results) def _is_installed(self, pkg): - return bool( - dnf.subject.Subject(pkg).get_best_query(sack=self.base.sack).installed().run() - ) + installed_query = dnf.subject.Subject(pkg).get_best_query(sack=self.base.sack).installed() + if dnf.util.is_glob_pattern(pkg): + available_query = dnf.subject.Subject(pkg).get_best_query(sack=self.base.sack).available() + return not ( + {p.name for p in available_query} - {p.name for p in installed_query} + ) + else: + return bool(installed_query) - def 
_is_newer_version_installed(self, pkg_name): + def _is_newer_version_installed(self, pkg_spec): try: - if isinstance(pkg_name, dnf.package.Package): - available = pkg_name + if isinstance(pkg_spec, dnf.package.Package): + installed = sorted(self.base.sack.query().installed().filter(name=pkg_spec.name, arch=pkg_spec.arch))[-1] + return installed.evr_gt(pkg_spec) else: - available = sorted( - dnf.subject.Subject(pkg_name).get_best_query(sack=self.base.sack).available().run() - )[-1] - installed = sorted(self.base.sack.query().installed().filter(name=available.name).run())[-1] + available = dnf.subject.Subject(pkg_spec).get_best_query(sack=self.base.sack).available() + installed = self.base.sack.query().installed().filter(name=available[0].name) + for arch in sorted(set(p.arch for p in installed)): # select only from already-installed arches for this case + installed_pkg = sorted(installed.filter(arch=arch))[-1] + try: + available_pkg = sorted(available.filter(arch=arch))[-1] + except IndexError: + continue # nothing currently available for this arch; keep going + if installed_pkg.evr_gt(available_pkg): + return True + return False except IndexError: return False - return installed > available def _mark_package_install(self, pkg_spec, upgrade=False): """Mark the package for install.""" @@ -801,16 +794,13 @@ class DnfModule(YumDnf): "results": [] } except dnf.exceptions.Error as e: - if to_text("already installed") in to_text(e): - return {'failed': False, 'msg': '', 'failure': ''} - else: - return { - 'failed': True, - 'msg': "Unknown Error occurred for package {0}.".format(pkg_spec), - 'failure': " ".join((pkg_spec, to_native(e))), - 'rc': 1, - "results": [] - } + return { + 'failed': True, + 'msg': "Unknown Error occurred for package {0}.".format(pkg_spec), + 'failure': " ".join((pkg_spec, to_native(e))), + 'rc': 1, + "results": [] + } return {'failed': False, 'msg': msg, 'failure': '', 'rc': 0} @@ -874,36 +864,20 @@ class DnfModule(YumDnf): return not_installed def 
_install_remote_rpms(self, filenames): - if int(dnf.__version__.split(".")[0]) >= 2: - pkgs = list(sorted(self.base.add_remote_rpms(list(filenames)), reverse=True)) - else: - pkgs = [] - try: - for filename in filenames: - pkgs.append(self.base.add_remote_rpm(filename)) - except IOError as e: - if to_text("Can not load RPM file") in to_text(e): - self.module.fail_json( - msg="Error occurred attempting remote rpm install of package: {0}. {1}".format(filename, to_native(e)), - results=[], - rc=1, - ) - if self.update_only: - self._update_only(pkgs) - else: - for pkg in pkgs: - try: - if self._is_newer_version_installed(pkg): - if self.allow_downgrade: - self.base.package_install(pkg, strict=self.base.conf.strict) - else: + try: + pkgs = self.base.add_remote_rpms(filenames) + if self.update_only: + self._update_only(pkgs) + else: + for pkg in pkgs: + if not (self._is_newer_version_installed(pkg) and not self.allow_downgrade): self.base.package_install(pkg, strict=self.base.conf.strict) - except Exception as e: - self.module.fail_json( - msg="Error occurred attempting remote rpm operation: {0}".format(to_native(e)), - results=[], - rc=1, - ) + except Exception as e: + self.module.fail_json( + msg="Error occurred attempting remote rpm operation: {0}".format(to_native(e)), + results=[], + rc=1, + ) def _is_module_installed(self, module_spec): if self.with_modules: @@ -1124,14 +1098,6 @@ class DnfModule(YumDnf): except dnf.exceptions.CompsError: # Group is already uninstalled. pass - except AttributeError: - # Group either isn't installed or wasn't marked installed at install time - # because of DNF bug - # - # This is necessary until the upstream dnf API bug is fixed where installing - # a group via the dnf API doesn't actually mark the group as installed - # https://bugzilla.redhat.com/show_bug.cgi?id=1620324 - pass for environment in environments: try: @@ -1140,25 +1106,11 @@ class DnfModule(YumDnf): # Environment is already uninstalled. 
pass - installed = self.base.sack.query().installed() for pkg_spec in pkg_specs: - # short-circuit installed check for wildcard matching - if '*' in pkg_spec: - try: - self.base.remove(pkg_spec) - except dnf.exceptions.MarkingError as e: - is_failure, handled_remove_error = self._sanitize_dnf_error_msg_remove(pkg_spec, to_native(e)) - if is_failure: - failure_response['failures'].append('{0} - {1}'.format(pkg_spec, to_native(e))) - else: - response['results'].append(handled_remove_error) - continue - - installed_pkg = dnf.subject.Subject(pkg_spec).get_best_query( - sack=self.base.sack).installed().run() - - for pkg in installed_pkg: - self.base.remove(str(pkg)) + try: + self.base.remove(pkg_spec) + except dnf.exceptions.MarkingError as e: + response['results'].append(f"{e.value}: {pkg_spec}") # Like the dnf CLI we want to allow recursive removal of dependent # packages @@ -1212,10 +1164,8 @@ class DnfModule(YumDnf): self.base.download_packages(self.base.transaction.install_set) except dnf.exceptions.DownloadError as e: - self.module.fail_json( - msg="Failed to download packages: {0}".format(to_text(e)), - results=[], - ) + failure_response['msg'] = "Failed to download packages: {0}".format(to_native(e)) + self.module.fail_json(**failure_response) # Validate GPG. 
This is NOT done in dnf.Base (it's done in the # upstream CLI subclass of dnf.Base) @@ -1256,33 +1206,10 @@ class DnfModule(YumDnf): failure_response['msg'] = "Depsolve Error occurred: {0}".format(to_native(e)) self.module.fail_json(**failure_response) except dnf.exceptions.Error as e: - if to_text("already installed") in to_text(e): - response['changed'] = False - response['results'].append("Package already installed: {0}".format(to_native(e))) - self.module.exit_json(**response) - else: - failure_response['msg'] = "Unknown Error occurred: {0}".format(to_native(e)) - self.module.fail_json(**failure_response) + failure_response['msg'] = "Unknown Error occurred: {0}".format(to_native(e)) + self.module.fail_json(**failure_response) def run(self): - """The main function.""" - - # Check if autoremove is called correctly - if self.autoremove: - if LooseVersion(dnf.__version__) < LooseVersion('2.0.1'): - self.module.fail_json( - msg="Autoremove requires dnf>=2.0.1. Current dnf version is %s" % dnf.__version__, - results=[], - ) - - # Check if download_dir is called correctly - if self.download_dir: - if LooseVersion(dnf.__version__) < LooseVersion('2.6.2'): - self.module.fail_json( - msg="download_dir requires dnf>=2.6.2. 
Current dnf version is %s" % dnf.__version__, - results=[], - ) - if self.update_cache and not self.names and not self.list: self.base = self._base( self.conf_file, self.disable_gpg_check, self.disablerepo, @@ -1340,7 +1267,7 @@ def main(): # list=repos # list=pkgspec - yumdnf_argument_spec['argument_spec']['use_backend'] = dict(default='auto', choices=['auto', 'yum', 'yum4', 'dnf4', 'dnf5']) + yumdnf_argument_spec['argument_spec']['use_backend'] = dict(default='auto', choices=['auto', 'dnf', 'yum', 'yum4', 'dnf4', 'dnf5']) module = AnsibleModule( **yumdnf_argument_spec diff --git a/lib/ansible/modules/dnf5.py b/lib/ansible/modules/dnf5.py index f54bc807924..df4ee206748 100644 --- a/lib/ansible/modules/dnf5.py +++ b/lib/ansible/modules/dnf5.py @@ -358,10 +358,23 @@ libdnf5 = None def is_installed(base, spec): settings = libdnf5.base.ResolveSpecSettings() - query = libdnf5.rpm.PackageQuery(base) - query.filter_installed() - match, nevra = query.resolve_pkg_spec(spec, settings, True) - return match + installed_query = libdnf5.rpm.PackageQuery(base) + installed_query.filter_installed() + match, nevra = installed_query.resolve_pkg_spec(spec, settings, True) + + # FIXME use `is_glob_pattern` function when available: + # https://github.com/rpm-software-management/dnf5/issues/1563 + glob_patterns = set("*[?") + if any(set(char) & glob_patterns for char in spec): + available_query = libdnf5.rpm.PackageQuery(base) + available_query.filter_available() + available_query.resolve_pkg_spec(spec, settings, True) + + return not ( + {p.get_name() for p in available_query} - {p.get_name() for p in installed_query} + ) + else: + return match def is_newer_version_installed(base, spec): @@ -438,7 +451,15 @@ class Dnf5Module(YumDnf): def fail_on_non_existing_plugins(self, base): # https://github.com/rpm-software-management/dnf5/issues/1460 - plugin_names = [p.get_name() for p in base.get_plugins_info()] + try: + plugin_names = [p.get_name() for p in base.get_plugins_info()] + except 
AttributeError: + # plugins functionality requires python3-libdnf5 5.2.0.0+ + # silently ignore here, the module will fail later when + # base.enable_disable_plugins is attempted to be used if + # user specifies enable_plugin/disable_plugin + return + msg = [] if enable_unmatched := set(self.enable_plugin).difference(plugin_names): msg.append( @@ -642,7 +663,7 @@ class Dnf5Module(YumDnf): results = [] if self.names == ["*"] and self.state == "latest": goal.add_rpm_upgrade(settings) - elif self.state in {"install", "present", "latest"}: + elif self.state in {"installed", "present", "latest"}: upgrade = self.state == "latest" for spec in self.names: if is_newer_version_installed(base, spec): @@ -675,7 +696,7 @@ class Dnf5Module(YumDnf): if transaction.get_problems(): failures = [] for log_event in transaction.get_resolve_logs(): - if log_event.get_problem() == libdnf5.base.GoalProblem_NOT_FOUND and self.state in {"install", "present", "latest"}: + if log_event.get_problem() == libdnf5.base.GoalProblem_NOT_FOUND and self.state in {"installed", "present", "latest"}: # NOTE dnf module compat failures.append("No package {} available.".format(log_event.get_spec())) else: diff --git a/lib/ansible/modules/dpkg_selections.py b/lib/ansible/modules/dpkg_selections.py index 6c66f6951f7..31841306d86 100644 --- a/lib/ansible/modules/dpkg_selections.py +++ b/lib/ansible/modules/dpkg_selections.py @@ -6,7 +6,7 @@ from __future__ import annotations -DOCUMENTATION = ''' +DOCUMENTATION = """ --- module: dpkg_selections short_description: Dpkg package selection selections @@ -39,8 +39,8 @@ attributes: platforms: debian notes: - This module will not cause any packages to be installed/removed/purged, use the M(ansible.builtin.apt) module for that. 
-''' -EXAMPLES = ''' +""" +EXAMPLES = """ - name: Prevent python from being upgraded ansible.builtin.dpkg_selections: name: python @@ -50,7 +50,7 @@ EXAMPLES = ''' ansible.builtin.dpkg_selections: name: python selection: install -''' +""" from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.common.locale import get_best_parsable_locale @@ -68,7 +68,7 @@ def main(): dpkg = module.get_bin_path('dpkg', True) locale = get_best_parsable_locale(module) - DPKG_ENV = dict(LANG=locale, LC_ALL=locale, LC_MESSAGES=locale, LC_CTYPE=locale) + DPKG_ENV = dict(LANG=locale, LC_ALL=locale, LC_MESSAGES=locale, LC_CTYPE=locale, LANGUAGE=locale) module.run_command_environ_update = DPKG_ENV name = module.params['name'] diff --git a/lib/ansible/modules/expect.py b/lib/ansible/modules/expect.py index 760d7148d60..90ece7d76f3 100644 --- a/lib/ansible/modules/expect.py +++ b/lib/ansible/modules/expect.py @@ -6,7 +6,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: expect version_added: '2.0' @@ -83,9 +83,9 @@ seealso: - module: ansible.builtin.script - module: ansible.builtin.shell author: "Matt Martz (@sivel)" -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - name: Case insensitive password string match ansible.builtin.expect: command: passwd username @@ -116,7 +116,7 @@ EXAMPLES = r''' - "{{ db_username }}" "Database password": - "{{ db_password }}" -''' +""" import datetime import os diff --git a/lib/ansible/modules/fail.py b/lib/ansible/modules/fail.py index e7a057e3fe1..7e68c77070f 100644 --- a/lib/ansible/modules/fail.py +++ b/lib/ansible/modules/fail.py @@ -6,7 +6,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: fail short_description: Fail with custom message @@ -52,11 +52,11 @@ seealso: - module: ansible.builtin.meta author: - Dag Wieers (@dagwieers) -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - name: Example using fail and when together ansible.builtin.fail: msg: The 
system may not be provisioned according to the CMDB status. when: cmdb_status != "to-be-staged" -''' +""" diff --git a/lib/ansible/modules/fetch.py b/lib/ansible/modules/fetch.py index a5edb767df0..5886a82ce8c 100644 --- a/lib/ansible/modules/fetch.py +++ b/lib/ansible/modules/fetch.py @@ -8,7 +8,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: fetch short_description: Fetch files from remote nodes @@ -95,9 +95,9 @@ seealso: author: - Ansible Core Team - Michael DeHaan -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - name: Store file into /tmp/fetched/host.example.com/tmp/somefile ansible.builtin.fetch: src: /tmp/somefile @@ -120,4 +120,4 @@ EXAMPLES = r''' src: /tmp/uniquefile dest: special/prefix-{{ inventory_hostname }} flat: yes -''' +""" diff --git a/lib/ansible/modules/file.py b/lib/ansible/modules/file.py index 65d0b3f582b..47bd3a5fbf7 100644 --- a/lib/ansible/modules/file.py +++ b/lib/ansible/modules/file.py @@ -7,7 +7,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: file version_added: historical @@ -63,9 +63,9 @@ options: force: description: - > - Force the creation of the symlinks in two cases: the source file does + Force the creation of the links in two cases: if the link type is symbolic and the source file does not exist (but will appear later); the destination exists and is a file (so, we need to unlink the - O(path) file and create a symlink to the O(src) file in place of it). + O(path) file and create a link to the O(src) file in place of it). 
type: bool default: no follow: @@ -123,9 +123,9 @@ attributes: author: - Ansible Core Team - Michael DeHaan -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - name: Change file ownership, group and permissions ansible.builtin.file: path: /etc/foo.conf @@ -214,8 +214,8 @@ EXAMPLES = r''' path: /etc/foo state: absent -''' -RETURN = r''' +""" +RETURN = r""" dest: description: Destination file/path, equal to the value passed to O(path). returned: O(state=touch), O(state=hard), O(state=link) @@ -226,12 +226,11 @@ path: returned: O(state=absent), O(state=directory), O(state=file) type: str sample: /path/to/file.txt -''' +""" import errno import os import shutil -import sys import time from pwd import getpwnam, getpwuid @@ -239,38 +238,13 @@ from grp import getgrnam, getgrgid from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.common.text.converters import to_bytes, to_native - +from ansible.module_utils.common.sentinel import Sentinel # There will only be a single AnsibleModule object per module module = None -class AnsibleModuleError(Exception): - def __init__(self, results): - self.results = results - - def __repr__(self): - return 'AnsibleModuleError(results={0})'.format(self.results) - - -class ParameterError(AnsibleModuleError): - pass - - -class Sentinel(object): - def __new__(cls, *args, **kwargs): - return cls - - -def _ansible_excepthook(exc_type, exc_value, tb): - # Using an exception allows us to catch it if the calling code knows it can recover - if issubclass(exc_type, AnsibleModuleError): - module.fail_json(**exc_value.results) - else: - sys.__excepthook__(exc_type, exc_value, tb) - - -def additional_parameter_handling(params): +def additional_parameter_handling(module): """Additional parameter validation and reformatting""" # When path is a directory, rewrite the pathname to be the file inside of the directory # TODO: Why do we exclude link? Why don't we exclude directory? Should we exclude touch? 
@@ -282,6 +256,7 @@ def additional_parameter_handling(params): # if state == file: place inside of the directory (use _original_basename) # if state == link: place inside of the directory (use _original_basename. Fallback to src?) # if state == hard: place inside of the directory (use _original_basename. Fallback to src?) + params = module.params if (params['state'] not in ("link", "absent") and os.path.isdir(to_bytes(params['path'], errors='surrogate_or_strict'))): basename = None @@ -307,17 +282,21 @@ def additional_parameter_handling(params): # make sure the target path is a directory when we're doing a recursive operation if params['recurse'] and params['state'] != 'directory': - raise ParameterError(results={"msg": "recurse option requires state to be 'directory'", - "path": params["path"]}) + module.fail_json( + msg="recurse option requires state to be 'directory'", + path=params["path"] + ) # Fail if 'src' but no 'state' is specified if params['src'] and params['state'] not in ('link', 'hard'): - raise ParameterError(results={'msg': "src option requires state to be 'link' or 'hard'", - 'path': params['path']}) + module.fail_json( + msg="src option requires state to be 'link' or 'hard'", + path=params['path'] + ) def get_state(path): - ''' Find out current state ''' + """ Find out current state """ b_path = to_bytes(path, errors='surrogate_or_strict') try: @@ -380,8 +359,8 @@ def recursive_set_attributes(b_path, follow, file_args, mtime, atime, diffs=None except RuntimeError as e: # on Python3 "RecursionError" is raised which is derived from "RuntimeError" # TODO once this function is moved into the common file utilities, this should probably raise more general exception - raise AnsibleModuleError( - results={'msg': "Could not recursively set attributes on %s. Original error was: '%s'" % (to_native(b_path), to_native(e))} + module.fail_json( + msg=f"Could not recursively set attributes on {to_native(b_path)}. 
Original error was: '{to_native(e)}'" ) return changed @@ -422,17 +401,17 @@ def initial_diff(path, state, prev_state): def get_timestamp_for_time(formatted_time, time_format): if formatted_time == 'preserve': return None - elif formatted_time == 'now': + if formatted_time == 'now': return Sentinel - else: - try: - struct = time.strptime(formatted_time, time_format) - struct_time = time.mktime(struct) - except (ValueError, OverflowError) as e: - raise AnsibleModuleError(results={'msg': 'Error while obtaining timestamp for time %s using format %s: %s' - % (formatted_time, time_format, to_native(e, nonstring='simplerepr'))}) + try: + struct = time.strptime(formatted_time, time_format) + struct_time = time.mktime(struct) + except (ValueError, OverflowError) as e: + module.fail_json( + msg=f"Error while obtaining timestamp for time {formatted_time} using format {time_format}: {to_native(e, nonstring='simplerepr')}", + ) - return struct_time + return struct_time def update_timestamp_for_file(path, mtime, atime, diff=None): @@ -489,18 +468,19 @@ def update_timestamp_for_file(path, mtime, atime, diff=None): diff['before']['atime'] = previous_atime diff['after']['atime'] = atime except OSError as e: - raise AnsibleModuleError(results={'msg': 'Error while updating modification or access time: %s' - % to_native(e, nonstring='simplerepr'), 'path': path}) + module.fail_json( + msg=f"Error while updating modification or access time: {to_native(e, nonstring='simplerepr')}", + path=path + ) return True def keep_backward_compatibility_on_timestamps(parameter, state): if state in ['file', 'hard', 'directory', 'link'] and parameter is None: return 'preserve' - elif state == 'touch' and parameter is None: + if state == 'touch' and parameter is None: return 'now' - else: - return parameter + return parameter def execute_diff_peek(path): @@ -533,14 +513,18 @@ def ensure_absent(path): try: shutil.rmtree(b_path, ignore_errors=False) except Exception as e: - raise 
AnsibleModuleError(results={'msg': "rmtree failed: %s" % to_native(e)}) + module.fail_json( + msg=f"rmtree failed: {to_native(e)}" + ) else: try: os.unlink(b_path) except OSError as e: if e.errno != errno.ENOENT: # It may already have been removed - raise AnsibleModuleError(results={'msg': "unlinking failed: %s " % to_native(e), - 'path': path}) + module.fail_json( + msg=f"unlinking failed: {to_native(e)}", + path=path + ) result.update({'path': path, 'changed': True, 'diff': diff, 'state': 'absent'}) else: @@ -569,9 +553,10 @@ def execute_touch(path, follow, timestamps): open(b_path, 'wb').close() changed = True except (OSError, IOError) as e: - raise AnsibleModuleError(results={'msg': 'Error, could not touch target: %s' - % to_native(e, nonstring='simplerepr'), - 'path': path}) + module.fail_json( + msg=f"Error, could not touch target: {to_native(e, nonstring='simplerepr')}", + path=path + ) # Update the attributes on the file diff = initial_diff(path, 'touch', prev_state) file_args = module.load_file_common_arguments(module.params) @@ -609,8 +594,11 @@ def ensure_file_attributes(path, follow, timestamps): if prev_state not in ('file', 'hard'): # file is not absent and any other state is a conflict - raise AnsibleModuleError(results={'msg': 'file (%s) is %s, cannot continue' % (path, prev_state), - 'path': path, 'state': prev_state}) + module.fail_json( + msg=f"file ({path}) is {prev_state}, cannot continue", + path=path, + state=prev_state + ) diff = initial_diff(path, 'file', prev_state) changed = module.set_fs_attributes_if_different(file_args, False, diff, expand=False) @@ -666,15 +654,18 @@ def ensure_directory(path, follow, recurse, timestamps): module.set_fs_attributes_if_different(tmp_file_args, False, mkdir_diff, expand=False) update_timestamp_for_file(file_args['path'], mtime, atime, mkdir_diff) except Exception as e: - raise AnsibleModuleError(results={'msg': 'There was an issue creating %s as requested:' - ' %s' % (curpath, to_native(e)), - 'path': 
path}) + module.fail_json( + msg=f"There was an issue creating {curpath} as requested: {to_native(e)}", + path=path + ) return {'path': path, 'changed': True, 'diff': mkdir_diffs} elif prev_state != 'directory': # We already know prev_state is not 'absent', therefore it exists in some form. - raise AnsibleModuleError(results={'msg': '%s already exists as a %s' % (path, prev_state), - 'path': path}) + module.fail_json( + msg=f"{path} already exists as a {prev_state}", + path=path + ) # # previous state == directory @@ -721,31 +712,39 @@ def ensure_symlink(path, src, follow, force, timestamps): b_absrc = to_bytes(absrc, errors='surrogate_or_strict') if not force and src is not None and not os.path.exists(b_absrc): - raise AnsibleModuleError(results={'msg': 'src file does not exist, use "force=yes" if you' - ' really want to create the link: %s' % absrc, - 'path': path, 'src': src}) + module.fail_json( + msg="src file does not exist, use 'force=yes' if you" + f" really want to create the link: {absrc}", + path=path, + src=src + ) if prev_state == 'directory': if not force: - raise AnsibleModuleError(results={'msg': 'refusing to convert from %s to symlink for %s' - % (prev_state, path), - 'path': path}) + module.fail_json( + msg=f'refusing to convert from {prev_state} to symlink for {path}', + path=path + ) elif os.listdir(b_path): # refuse to replace a directory that has files in it - raise AnsibleModuleError(results={'msg': 'the directory %s is not empty, refusing to' - ' convert it' % path, - 'path': path}) + module.fail_json( + msg=f'the directory {path} is not empty, refusing to convert it', + path=path + ) elif prev_state in ('file', 'hard') and not force: - raise AnsibleModuleError(results={'msg': 'refusing to convert from %s to symlink for %s' - % (prev_state, path), - 'path': path}) + module.fail_json( + msg=f'refusing to convert from {prev_state} to symlink for {path}', + path=path + ) diff = initial_diff(path, 'link', prev_state) changed = False if 
prev_state in ('hard', 'file', 'directory', 'absent'): if src is None: - raise AnsibleModuleError(results={'msg': 'src is required for creating new symlinks'}) + module.fail_json( + msg='src is required for creating new symlinks', + ) changed = True elif prev_state == 'link': if src is not None: @@ -755,7 +754,11 @@ def ensure_symlink(path, src, follow, force, timestamps): diff['after']['src'] = src changed = True else: - raise AnsibleModuleError(results={'msg': 'unexpected position reached', 'dest': path, 'src': src}) + module.fail_json( + msg='unexpected position reached', + dest=path, + src=src + ) if changed and not module.check_mode: if prev_state != 'absent': @@ -771,16 +774,18 @@ def ensure_symlink(path, src, follow, force, timestamps): except OSError as e: if os.path.exists(b_tmppath): os.unlink(b_tmppath) - raise AnsibleModuleError(results={'msg': 'Error while replacing: %s' - % to_native(e, nonstring='simplerepr'), - 'path': path}) + module.fail_json( + msg=f"Error while replacing: {to_native(e, nonstring='simplerepr')}", + path=path + ) else: try: os.symlink(b_src, b_path) except OSError as e: - raise AnsibleModuleError(results={'msg': 'Error while linking: %s' - % to_native(e, nonstring='simplerepr'), - 'path': path}) + module.fail_json( + msg=f"Error while linking: {to_native(e, nonstring='simplerepr')}", + path=path + ) if module.check_mode and not os.path.exists(b_path): return {'dest': path, 'src': src, 'changed': changed, 'diff': diff} @@ -815,12 +820,18 @@ def ensure_hardlink(path, src, follow, force, timestamps): # src is the source of a hardlink. We require it if we are creating a new hardlink. # We require path in the argument_spec so we know it is present at this point. 
if prev_state != 'hard' and src is None: - raise AnsibleModuleError(results={'msg': 'src is required for creating new hardlinks'}) + module.fail_json( + msg='src is required for creating new hardlinks' + ) # Even if the link already exists, if src was specified it needs to exist. # The inode number will be compared to ensure the link has the correct target. if src is not None and not os.path.exists(b_src): - raise AnsibleModuleError(results={'msg': 'src does not exist', 'dest': path, 'src': src}) + module.fail_json( + msg='src does not exist', + dest=path, + src=src + ) diff = initial_diff(path, 'hard', prev_state) changed = False @@ -834,26 +845,39 @@ def ensure_hardlink(path, src, follow, force, timestamps): diff['after']['src'] = src changed = True elif prev_state == 'hard': - if src is not None and not os.stat(b_path).st_ino == os.stat(b_src).st_ino: + if src is not None and os.stat(b_path).st_ino != os.stat(b_src).st_ino: changed = True if not force: - raise AnsibleModuleError(results={'msg': 'Cannot link, different hard link exists at destination', - 'dest': path, 'src': src}) + module.fail_json( + msg='Cannot link, different hard link exists at destination', + dest=path, + src=src + ) elif prev_state == 'file': changed = True if not force: - raise AnsibleModuleError(results={'msg': 'Cannot link, %s exists at destination' % prev_state, - 'dest': path, 'src': src}) + module.fail_json( + msg=f'Cannot link, {prev_state} exists at destination', + dest=path, + src=src + ) elif prev_state == 'directory': changed = True if os.path.exists(b_path): if os.stat(b_path).st_ino == os.stat(b_src).st_ino: return {'path': path, 'changed': False} elif not force: - raise AnsibleModuleError(results={'msg': 'Cannot link: different hard link exists at destination', - 'dest': path, 'src': src}) + module.fail_json( + msg='Cannot link: different hard link exists at destination', + dest=path, + src=src + ) else: - raise AnsibleModuleError(results={'msg': 'unexpected position 
reached', 'dest': path, 'src': src}) + module.fail_json( + msg='unexpected position reached', + dest=path, + src=src + ) if changed and not module.check_mode: if prev_state != 'absent': @@ -874,18 +898,20 @@ def ensure_hardlink(path, src, follow, force, timestamps): except OSError as e: if os.path.exists(b_tmppath): os.unlink(b_tmppath) - raise AnsibleModuleError(results={'msg': 'Error while replacing: %s' - % to_native(e, nonstring='simplerepr'), - 'path': path}) + module.fail_json( + msg=f"Error while replacing: {to_native(e, nonstring='simplerepr')}", + path=path + ) else: try: if follow and os.path.islink(b_src): b_src = os.readlink(b_src) os.link(b_src, b_path) except OSError as e: - raise AnsibleModuleError(results={'msg': 'Error while linking: %s' - % to_native(e, nonstring='simplerepr'), - 'path': path}) + module.fail_json( + msg=f"Error while linking: {to_native(e, nonstring='simplerepr')}", + path=path + ) if module.check_mode and not os.path.exists(b_path): return {'dest': path, 'src': src, 'changed': changed, 'diff': diff} @@ -947,9 +973,7 @@ def main(): supports_check_mode=True, ) - # When we rewrite basic.py, we will do something similar to this on instantiating an AnsibleModule - sys.excepthook = _ansible_excepthook - additional_parameter_handling(module.params) + additional_parameter_handling(module) params = module.params state = params['state'] @@ -990,6 +1014,9 @@ def main(): elif state == 'absent': result = ensure_absent(path) + if not module._diff: + result.pop('diff', None) + module.exit_json(**result) diff --git a/lib/ansible/modules/find.py b/lib/ansible/modules/find.py index 3379718130a..8c2820c48e7 100644 --- a/lib/ansible/modules/find.py +++ b/lib/ansible/modules/find.py @@ -9,7 +9,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: find author: Brian Coca (@bcoca) @@ -29,6 +29,10 @@ options: - You can choose seconds, minutes, hours, days, or weeks by specifying the first letter of any of those 
words (e.g., "1w"). type: str + get_checksum: + default: false + checksum_algorithm: + version_added: "2.19" patterns: default: [] description: @@ -75,10 +79,11 @@ options: paths: description: - List of paths of directories to search. All paths must be fully qualified. + - From ansible-core 2.18 and onwards, the data type has changed from C(str) to C(path). type: list required: true aliases: [ name, path ] - elements: str + elements: path file_type: description: - Type of file to select. @@ -131,11 +136,6 @@ options: - Set this to V(true) to follow symlinks in path for systems with python 2.6+. type: bool default: no - get_checksum: - description: - - Set this to V(true) to retrieve a file's SHA1 checksum. - type: bool - default: no use_regex: description: - If V(false), the patterns are file globs (shell). @@ -154,7 +154,15 @@ options: - When doing a O(contains) search, determine the encoding of the files to be searched. type: str version_added: "2.17" -extends_documentation_fragment: action_common_attributes + limit: + description: + - Limit the maximum number of matching paths returned. After finding this many, the find action will stop looking. + - Matches are made from the top, down (i.e. shallowest directory first). + - If not set, or set to V(null), it will do unlimited matches. + - Default is unlimited matches. 
+ type: int + version_added: "2.18" +extends_documentation_fragment: [action_common_attributes, checksum_common] attributes: check_mode: details: since this action does not modify the target it just executes normally during check mode @@ -165,10 +173,10 @@ attributes: platforms: posix seealso: - module: ansible.windows.win_find -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - name: Recursively find /tmp files older than 2 days ansible.builtin.find: paths: /tmp @@ -227,9 +235,19 @@ EXAMPLES = r''' - '^_[0-9]{2,4}_.*.log$' - '^[a-z]{1,5}_.*log$' -''' - -RETURN = r''' +- name: Find file containing "wally" without necessarily reading all files + ansible.builtin.find: + paths: /var/log + file_type: file + contains: wally + read_whole_file: true + patterns: "^.*\\.log$" + use_regex: true + recurse: true + limit: 1 +""" + +RETURN = r""" files: description: All matches found with the specified criteria (see stat module for full output of each dictionary) returned: success @@ -260,7 +278,7 @@ skipped_paths: type: dict sample: {"/laskdfj": "'/laskdfj' is not a directory"} version_added: '2.12' -''' +""" import errno import fnmatch @@ -283,7 +301,7 @@ class _Object: def pfilter(f, patterns=None, excludes=None, use_regex=False): - '''filter using glob patterns''' + """filter using glob patterns""" if not patterns and not excludes: return True @@ -322,7 +340,7 @@ def pfilter(f, patterns=None, excludes=None, use_regex=False): def agefilter(st, now, age, timestamp): - '''filter files older than age''' + """filter files older than age""" if age is None: return True elif age >= 0 and now - getattr(st, "st_%s" % timestamp) >= abs(age): @@ -333,7 +351,7 @@ def agefilter(st, now, age, timestamp): def sizefilter(st, size): - '''filter files greater than size''' + """filter files greater than size""" if size is None: return True elif size >= 0 and st.st_size >= abs(size): @@ -450,7 +468,7 @@ def statinfo(st): def main(): module = AnsibleModule( argument_spec=dict( - paths=dict(type='list', 
required=True, aliases=['name', 'path'], elements='str'), + paths=dict(type='list', required=True, aliases=['name', 'path'], elements='path'), patterns=dict(type='list', default=[], aliases=['pattern'], elements='str'), excludes=dict(type='list', aliases=['exclude'], elements='str'), contains=dict(type='str'), @@ -463,11 +481,15 @@ def main(): hidden=dict(type='bool', default=False), follow=dict(type='bool', default=False), get_checksum=dict(type='bool', default=False), + checksum_algorithm=dict(type='str', default='sha1', + choices=['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'], + aliases=['checksum', 'checksum_algo']), use_regex=dict(type='bool', default=False), depth=dict(type='int'), mode=dict(type='raw'), exact_mode=dict(type='bool', default=True), - encoding=dict(type='str') + encoding=dict(type='str'), + limit=dict(type='int') ), supports_check_mode=True, ) @@ -520,17 +542,20 @@ def main(): else: module.fail_json(size=params['size'], msg="failed to process size") + if params['limit'] is not None and params['limit'] <= 0: + module.fail_json(msg="limit cannot be %d (use None for unlimited)" % params['limit']) + now = time.time() msg = 'All paths examined' looked = 0 has_warnings = False for npath in params['paths']: - npath = os.path.expanduser(os.path.expandvars(npath)) try: if not os.path.isdir(npath): raise Exception("'%s' is not a directory" % to_native(npath)) - for root, dirs, files in os.walk(npath, onerror=handle_walk_errors, followlinks=params['follow']): + # Setting `topdown=True` to explicitly guarantee matches are made from the shallowest directory first + for root, dirs, files in os.walk(npath, onerror=handle_walk_errors, followlinks=params['follow'], topdown=True): looked = looked + len(files) + len(dirs) for fsobj in (files + dirs): fsname = os.path.normpath(os.path.join(root, fsobj)) @@ -560,7 +585,7 @@ def main(): r.update(statinfo(st)) if stat.S_ISREG(st.st_mode) and params['get_checksum']: - r['checksum'] = module.sha1(fsname) + 
r['checksum'] = module.digest_from_file(fsname, params['checksum_algorithm']) if stat.S_ISREG(st.st_mode): if sizefilter(st, size): @@ -585,7 +610,7 @@ def main(): r.update(statinfo(st)) if params['get_checksum']: - r['checksum'] = module.sha1(fsname) + r['checksum'] = module.digest_from_file(fsname, params['checksum_algorithm']) filelist.append(r) elif stat.S_ISLNK(st.st_mode) and params['file_type'] == 'link': @@ -596,7 +621,12 @@ def main(): r.update(statinfo(st)) filelist.append(r) - if not params['recurse']: + if len(filelist) == params["limit"]: + # Breaks out of directory files loop only + msg = "Limit of matches reached" + break + + if not params['recurse'] or len(filelist) == params["limit"]: break except Exception as e: skipped[npath] = to_text(e) diff --git a/lib/ansible/modules/gather_facts.py b/lib/ansible/modules/gather_facts.py index 85fbe873326..3d0275a0f6e 100644 --- a/lib/ansible/modules/gather_facts.py +++ b/lib/ansible/modules/gather_facts.py @@ -1,11 +1,11 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2017 Ansible Project +# Copyright: Contributors to the Ansible project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import annotations -DOCUMENTATION = ''' +DOCUMENTATION = """ --- module: gather_facts version_added: 2.8 @@ -57,7 +57,7 @@ notes: Order is not guaranteed, when doing parallel gathering on multiple modules. 
author: - "Ansible Core Team" -''' +""" RETURN = """ # depends on the fact module called diff --git a/lib/ansible/modules/get_url.py b/lib/ansible/modules/get_url.py index d7c1cc45d15..52c812c0c61 100644 --- a/lib/ansible/modules/get_url.py +++ b/lib/ansible/modules/get_url.py @@ -6,7 +6,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: get_url short_description: Downloads files from HTTP, HTTPS, or FTP to node @@ -219,9 +219,9 @@ seealso: - module: ansible.windows.win_get_url author: - Jan-Piet Mens (@jpmens) -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - name: Download foo.conf ansible.builtin.get_url: url: http://example.com/path/file.conf @@ -272,9 +272,9 @@ EXAMPLES = r''' dest: /etc/foo.conf username: bar password: '{{ mysecret }}' -''' +""" -RETURN = r''' +RETURN = r""" backup_file: description: name of backup file created after download returned: changed and if backup=yes @@ -365,8 +365,9 @@ url: returned: always type: str sample: https://www.ansible.com/ -''' +""" +import email.message import os import re import shutil @@ -439,23 +440,16 @@ def url_get(module, url, dest, use_proxy, last_mod_time, force, timeout=10, head def extract_filename_from_headers(headers): - """ - Extracts a filename from the given dict of HTTP headers. - - Looks for the content-disposition header and applies a regex. - Returns the filename if successful, else None.""" - cont_disp_regex = 'attachment; ?filename="?([^"]+)' - res = None - - if 'content-disposition' in headers: - cont_disp = headers['content-disposition'] - match = re.match(cont_disp_regex, cont_disp) - if match: - res = match.group(1) - # Try preventing any funny business. - res = os.path.basename(res) + """Extracts a filename from the given dict of HTTP headers. - return res + Returns the filename if successful, else None. 
+ """ + msg = email.message.Message() + msg['content-disposition'] = headers.get('content-disposition', '') + if filename := msg.get_param('filename', header='content-disposition'): + # Avoid directory traversal + filename = os.path.basename(filename) + return filename def is_url(checksum): @@ -663,6 +657,16 @@ def main(): result['checksum_src'] != result['checksum_dest']) module.exit_json(msg=info.get('msg', ''), **result) + # If a checksum was provided, ensure that the temporary file matches this checksum + # before moving it to the destination. + if checksum != '': + tmpsrc_checksum = module.digest_from_file(tmpsrc, algorithm) + + if checksum != tmpsrc_checksum: + os.remove(tmpsrc) + module.fail_json(msg=f"The checksum for {tmpsrc} did not match {checksum}; it was {tmpsrc_checksum}.", **result) + + # Copy temporary file to destination if necessary backup_file = None if result['checksum_src'] != result['checksum_dest']: try: @@ -681,13 +685,6 @@ def main(): if os.path.exists(tmpsrc): os.remove(tmpsrc) - if checksum != '': - destination_checksum = module.digest_from_file(dest, algorithm) - - if checksum != destination_checksum: - os.remove(dest) - module.fail_json(msg="The checksum for %s did not match %s; it was %s." % (dest, checksum, destination_checksum), **result) - # allow file attribute changes file_args = module.load_file_common_arguments(module.params, path=dest) result['changed'] = module.set_fs_attributes_if_different(file_args, result['changed']) diff --git a/lib/ansible/modules/getent.py b/lib/ansible/modules/getent.py index b07fb82351a..1938af1fcfa 100644 --- a/lib/ansible/modules/getent.py +++ b/lib/ansible/modules/getent.py @@ -6,7 +6,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: getent short_description: A wrapper to the unix getent utility @@ -58,9 +58,9 @@ notes: - Not all databases support enumeration, check system documentation for details. 
author: - Brian Coca (@bcoca) -''' +""" -EXAMPLES = ''' +EXAMPLES = """ - name: Get root user info ansible.builtin.getent: database: passwd @@ -97,9 +97,9 @@ EXAMPLES = ''' - ansible.builtin.debug: var: ansible_facts.getent_shadow -''' +""" -RETURN = ''' +RETURN = """ ansible_facts: description: Facts to add to ansible_facts. returned: always @@ -112,7 +112,7 @@ ansible_facts: - Starting at 2.11 it now returns multiple duplicate entries, previously it only returned the last one returned: always type: list -''' +""" import traceback diff --git a/lib/ansible/modules/git.py b/lib/ansible/modules/git.py index 89e409b0e2e..14d26195461 100644 --- a/lib/ansible/modules/git.py +++ b/lib/ansible/modules/git.py @@ -6,7 +6,7 @@ from __future__ import annotations -DOCUMENTATION = ''' +DOCUMENTATION = """ --- module: git author: @@ -236,68 +236,68 @@ notes: one solution is to use the option accept_hostkey. Another solution is to add the remote host public key in C(/etc/ssh/ssh_known_hosts) before calling the git module, with the following command: C(ssh-keyscan -H remote_host.com >> /etc/ssh/ssh_known_hosts)." 
-''' +""" -EXAMPLES = ''' +EXAMPLES = """ - name: Git checkout ansible.builtin.git: - repo: 'https://foosball.example.org/path/to/repo.git' - dest: /srv/checkout + repo: 'https://github.com/ansible/ansible.git' + dest: /tmp/checkout version: release-0.22 - name: Read-write git checkout from github ansible.builtin.git: - repo: git@github.com:mylogin/hello.git - dest: /home/mylogin/hello + repo: git@github.com:ansible/ansible.git + dest: /tmp/checkout - name: Just ensuring the repo checkout exists ansible.builtin.git: - repo: 'https://foosball.example.org/path/to/repo.git' - dest: /srv/checkout + repo: 'https://github.com/ansible/ansible.git' + dest: /tmp/checkout update: no - name: Just get information about the repository whether or not it has already been cloned locally ansible.builtin.git: - repo: 'https://foosball.example.org/path/to/repo.git' - dest: /srv/checkout + repo: git@github.com:ansible/ansible.git + dest: /tmp/checkout clone: no update: no - name: Checkout a github repo and use refspec to fetch all pull requests ansible.builtin.git: - repo: https://github.com/ansible/ansible-examples.git - dest: /src/ansible-examples + repo: 'https://github.com/ansible/ansible.git' + dest: /tmp/checkout refspec: '+refs/pull/*:refs/heads/*' - name: Create git archive from repo ansible.builtin.git: - repo: https://github.com/ansible/ansible-examples.git - dest: /src/ansible-examples - archive: /tmp/ansible-examples.zip + repo: git@github.com:ansible/ansible.git + dest: /tmp/checkout + archive: /tmp/ansible.zip - name: Clone a repo with separate git directory ansible.builtin.git: - repo: https://github.com/ansible/ansible-examples.git - dest: /src/ansible-examples - separate_git_dir: /src/ansible-examples.git + repo: 'https://github.com/ansible/ansible.git' + dest: /tmp/checkout + separate_git_dir: /tmp/repo - name: Example clone of a single branch ansible.builtin.git: - repo: https://github.com/ansible/ansible-examples.git - dest: /src/ansible-examples + repo: 
git@github.com:ansible/ansible.git + dest: /tmp/checkout single_branch: yes version: master - name: Avoid hanging when http(s) password is missing ansible.builtin.git: - repo: https://github.com/ansible/could-be-a-private-repo - dest: /src/from-private-repo + repo: 'https://github.com/ansible/ansible.git' + dest: /tmp/checkout environment: GIT_TERMINAL_PROMPT: 0 # reports "terminal prompts disabled" on missing password # or GIT_ASKPASS: /bin/true # for git before version 2.3.0, reports "Authentication failed" on missing password -''' +""" -RETURN = ''' +RETURN = """ after: description: Last commit revision of the repository retrieved during the update. returned: success @@ -328,7 +328,7 @@ git_dir_before: returned: success type: str sample: /path/to/old/git/dir -''' +""" import filecmp import os @@ -366,16 +366,15 @@ def relocate_repo(module, result, repo_dir, old_repo_dir, worktree_dir): def head_splitter(headfile, remote, module=None, fail_on_error=False): - '''Extract the head reference''' + """Extract the head reference""" # https://github.com/ansible/ansible-modules-core/pull/907 res = None if os.path.exists(headfile): rawdata = None try: - f = open(headfile, 'r') - rawdata = f.readline() - f.close() + with open(headfile, 'r') as f: + rawdata = f.readline() except Exception: if fail_on_error and module: module.fail_json(msg="Unable to read %s" % headfile) @@ -429,11 +428,11 @@ def get_submodule_update_params(module, git_path, cwd): def write_ssh_wrapper(module): - ''' + """ This writes an shell wrapper for ssh options to be used with git this is only relevant for older versions of gitthat cannot handle the options themselves. 
Returns path to the script - ''' + """ try: # make sure we have full permission to the module_dir, which # may not be the case if we're sudo'ing to a non-root user @@ -466,10 +465,10 @@ def write_ssh_wrapper(module): def set_git_ssh_env(key_file, ssh_opts, git_version, module): - ''' + """ use environment variables to configure git's ssh execution, - which varies by version but this functino should handle all. - ''' + which varies by version but this function should handle all. + """ # initialise to existing ssh opts and/or append user provided if ssh_opts is None: @@ -519,7 +518,7 @@ def set_git_ssh_env(key_file, ssh_opts, git_version, module): def get_version(module, git_path, dest, ref="HEAD"): - ''' samples the version of the git repo ''' + """ samples the version of the git repo """ cmd = "%s rev-parse %s" % (git_path, ref) rc, stdout, stderr = module.run_command(cmd, cwd=dest) @@ -571,7 +570,7 @@ def get_submodule_versions(git_path, module, dest, version='HEAD'): def clone(git_path, module, repo, dest, remote, depth, version, bare, reference, refspec, git_version_used, verify_commit, separate_git_dir, result, gpg_allowlist, single_branch): - ''' makes a new git repo if it does not already exist ''' + """ makes a new git repo if it does not already exist """ dest_dirname = os.path.dirname(dest) try: os.makedirs(dest_dirname) @@ -653,17 +652,17 @@ def has_local_mods(module, git_path, dest, bare): def reset(git_path, module, dest): - ''' + """ Resets the index and working tree to HEAD. Discards any changes to tracked files in working tree since that commit. 
- ''' + """ cmd = "%s reset --hard HEAD" % (git_path,) return module.run_command(cmd, check_rc=True, cwd=dest) def get_diff(module, git_path, dest, repo, remote, depth, bare, before, after): - ''' Return the difference between 2 versions ''' + """ Return the difference between 2 versions """ if before is None: return {'prepared': '>> Newly checked out %s' % after} elif before != after: @@ -817,13 +816,13 @@ def get_repo_path(dest, bare): def get_head_branch(git_path, module, dest, remote, bare=False): - ''' + """ Determine what branch HEAD is associated with. This is partly taken from lib/ansible/utils/__init__.py. It finds the correct path to .git/HEAD and reads from that file the branch that HEAD is associated with. In the case of a detached HEAD, this will look up the branch in .git/refs/remotes//HEAD. - ''' + """ try: repo_path = get_repo_path(dest, bare) except (IOError, ValueError) as err: @@ -845,7 +844,7 @@ def get_head_branch(git_path, module, dest, remote, bare=False): def get_remote_url(git_path, module, dest, remote): - '''Return URL of remote source for repo.''' + """Return URL of remote source for repo.""" command = [git_path, 'ls-remote', '--get-url', remote] (rc, out, err) = module.run_command(command, cwd=dest) if rc != 0: @@ -856,7 +855,7 @@ def get_remote_url(git_path, module, dest, remote): def set_remote_url(git_path, module, repo, dest, remote): - ''' updates repo from remote sources ''' + """ updates repo from remote sources """ # Return if remote URL isn't changing. 
remote_url = get_remote_url(git_path, module, dest, remote) if remote_url == repo or unfrackgitpath(remote_url) == unfrackgitpath(repo): @@ -874,7 +873,7 @@ def set_remote_url(git_path, module, repo, dest, remote): def fetch(git_path, module, repo, dest, version, remote, depth, bare, refspec, git_version_used, force=False): - ''' updates repo from remote sources ''' + """ updates repo from remote sources """ set_remote_url(git_path, module, repo, dest, remote) commands = [] @@ -981,7 +980,7 @@ def submodules_fetch(git_path, module, remote, track_submodules, dest): def submodule_update(git_path, module, dest, track_submodules, force=False): - ''' init and update any submodules ''' + """ init and update any submodules """ # get the valid submodule params params = get_submodule_update_params(module, git_path, dest) diff --git a/lib/ansible/modules/group.py b/lib/ansible/modules/group.py index a838db4a5c2..a31b9f8c73a 100644 --- a/lib/ansible/modules/group.py +++ b/lib/ansible/modules/group.py @@ -6,7 +6,7 @@ from __future__ import annotations -DOCUMENTATION = ''' +DOCUMENTATION = """ --- module: group version_added: "0.0.2" @@ -62,6 +62,22 @@ options: type: bool default: no version_added: "2.8" + gid_min: + description: + - Sets the GID_MIN value for group creation. + - Overwrites /etc/login.defs default value. + - Currently supported on Linux. Does nothing when used with other platforms. + - Requires O(local) is omitted or V(False). + type: int + version_added: "2.18" + gid_max: + description: + - Sets the GID_MAX value for group creation. + - Overwrites /etc/login.defs default value. + - Currently supported on Linux. Does nothing when used with other platforms. + - Requires O(local) is omitted or V(False). 
+ type: int + version_added: "2.18" extends_documentation_fragment: action_common_attributes attributes: check_mode: @@ -75,9 +91,9 @@ seealso: - module: ansible.windows.win_group author: - Stephen Fromm (@sfromm) -''' +""" -EXAMPLES = ''' +EXAMPLES = """ - name: Ensure group "somegroup" exists ansible.builtin.group: name: somegroup @@ -88,9 +104,9 @@ EXAMPLES = ''' name: docker state: present gid: 1750 -''' +""" -RETURN = r''' +RETURN = r""" gid: description: Group ID of the group. returned: When O(state) is C(present) @@ -111,7 +127,7 @@ system: returned: When O(state) is C(present) type: bool sample: False -''' +""" import grp import os @@ -151,6 +167,14 @@ class Group(object): self.system = module.params['system'] self.local = module.params['local'] self.non_unique = module.params['non_unique'] + self.gid_min = module.params['gid_min'] + self.gid_max = module.params['gid_max'] + + if self.local: + if self.gid_min is not None: + module.fail_json(msg="'gid_min' can not be used with 'local'") + if self.gid_max is not None: + module.fail_json(msg="'gid_max' can not be used with 'local'") def execute_command(self, cmd): return self.module.run_command(cmd) @@ -184,6 +208,12 @@ class Group(object): cmd.append('-o') elif key == 'system' and kwargs[key] is True: cmd.append('-r') + if self.gid_min is not None: + cmd.append('-K') + cmd.append('GID_MIN=' + str(self.gid_min)) + if self.gid_max is not None: + cmd.append('-K') + cmd.append('GID_MAX=' + str(self.gid_max)) cmd.append(self.name) return self.execute_command(cmd) @@ -292,6 +322,12 @@ class SunOS(Group): cmd.append(str(kwargs[key])) if self.non_unique: cmd.append('-o') + if self.gid_min is not None: + cmd.append('-K') + cmd.append('GID_MIN=' + str(self.gid_min)) + if self.gid_max is not None: + cmd.append('-K') + cmd.append('GID_MAX=' + str(self.gid_max)) cmd.append(self.name) return self.execute_command(cmd) @@ -323,6 +359,12 @@ class AIX(Group): cmd.append('id=' + str(kwargs[key])) elif key == 'system' and 
kwargs[key] is True: cmd.append('-a') + if self.gid_min is not None: + cmd.append('-K') + cmd.append('GID_MIN=' + str(self.gid_min)) + if self.gid_max is not None: + cmd.append('-K') + cmd.append('GID_MAX=' + str(self.gid_max)) cmd.append(self.name) return self.execute_command(cmd) @@ -368,6 +410,12 @@ class FreeBsdGroup(Group): cmd.append(str(self.gid)) if self.non_unique: cmd.append('-o') + if self.gid_min is not None: + cmd.append('-K') + cmd.append('GID_MIN=' + str(self.gid_min)) + if self.gid_max is not None: + cmd.append('-K') + cmd.append('GID_MAX=' + str(self.gid_max)) return self.execute_command(cmd) def group_mod(self, **kwargs): @@ -492,6 +540,12 @@ class OpenBsdGroup(Group): cmd.append(str(self.gid)) if self.non_unique: cmd.append('-o') + if self.gid_min is not None: + cmd.append('-K') + cmd.append('GID_MIN=' + str(self.gid_min)) + if self.gid_max is not None: + cmd.append('-K') + cmd.append('GID_MAX=' + str(self.gid_max)) cmd.append(self.name) return self.execute_command(cmd) @@ -538,6 +592,12 @@ class NetBsdGroup(Group): cmd.append(str(self.gid)) if self.non_unique: cmd.append('-o') + if self.gid_min is not None: + cmd.append('-K') + cmd.append('GID_MIN=' + str(self.gid_min)) + if self.gid_max is not None: + cmd.append('-K') + cmd.append('GID_MAX=' + str(self.gid_max)) cmd.append(self.name) return self.execute_command(cmd) @@ -578,6 +638,14 @@ class BusyBoxGroup(Group): if self.system: cmd.append('-S') + if self.gid_min is not None: + cmd.append('-K') + cmd.append('GID_MIN=' + str(self.gid_min)) + + if self.gid_max is not None: + cmd.append('-K') + cmd.append('GID_MAX=' + str(self.gid_max)) + cmd.append(self.name) return self.execute_command(cmd) @@ -626,6 +694,8 @@ def main(): system=dict(type='bool', default=False), local=dict(type='bool', default=False), non_unique=dict(type='bool', default=False), + gid_min=dict(type='int'), + gid_max=dict(type='int'), ), supports_check_mode=True, required_if=[ diff --git a/lib/ansible/modules/group_by.py 
b/lib/ansible/modules/group_by.py index 6efe8001514..5fc7b690af4 100644 --- a/lib/ansible/modules/group_by.py +++ b/lib/ansible/modules/group_by.py @@ -7,7 +7,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: group_by short_description: Create Ansible groups based on facts @@ -65,9 +65,9 @@ seealso: - module: ansible.builtin.add_host author: - Jeroen Hoekx (@jhoekx) -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - name: Create groups based on the machine architecture ansible.builtin.group_by: key: machine_{{ ansible_machine }} @@ -85,4 +85,4 @@ EXAMPLES = r''' - name: Add all active hosts to a static group ansible.builtin.group_by: key: done -''' +""" diff --git a/lib/ansible/modules/hostname.py b/lib/ansible/modules/hostname.py index 681d63c508e..79f9bcb0709 100644 --- a/lib/ansible/modules/hostname.py +++ b/lib/ansible/modules/hostname.py @@ -7,7 +7,7 @@ from __future__ import annotations -DOCUMENTATION = ''' +DOCUMENTATION = """ --- module: hostname author: @@ -52,9 +52,9 @@ attributes: support: full platform: platforms: posix -''' +""" -EXAMPLES = ''' +EXAMPLES = """ - name: Set a hostname ansible.builtin.hostname: name: web01 @@ -63,7 +63,7 @@ EXAMPLES = ''' ansible.builtin.hostname: name: web01 use: systemd -''' +""" import os import platform @@ -516,7 +516,7 @@ class DarwinStrategy(BaseStrategy): However, macOS also has LocalHostName and ComputerName settings. LocalHostName controls the Bonjour/ZeroConf name, used by services like AirDrop. This class implements a method, _scrub_hostname(), that mimics - the transformations macOS makes on hostnames when enterened in the Sharing + the transformations macOS makes on hostnames when entered in the Sharing preference pane. It replaces spaces with dashes and removes all special characters. 
@@ -886,8 +886,6 @@ def main(): if name != current_hostname: name_before = current_hostname - elif name != permanent_hostname: - name_before = permanent_hostname else: name_before = permanent_hostname diff --git a/lib/ansible/modules/import_playbook.py b/lib/ansible/modules/import_playbook.py index a4c7809637f..71f1693241d 100644 --- a/lib/ansible/modules/import_playbook.py +++ b/lib/ansible/modules/import_playbook.py @@ -6,7 +6,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- author: Ansible Core Team (@ansible) module: import_playbook @@ -42,9 +42,9 @@ seealso: - module: ansible.builtin.include_tasks - ref: playbooks_reuse description: More information related to including and importing playbooks, roles and tasks. -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - hosts: localhost tasks: - ansible.builtin.debug: @@ -69,8 +69,8 @@ EXAMPLES = r''' - name: This fails because I'm inside a play already ansible.builtin.import_playbook: stuff.yaml -''' +""" -RETURN = r''' +RETURN = r""" # This module does not return anything except plays to execute. -''' +""" diff --git a/lib/ansible/modules/import_role.py b/lib/ansible/modules/import_role.py index 719d4297b92..0b9eff71244 100644 --- a/lib/ansible/modules/import_role.py +++ b/lib/ansible/modules/import_role.py @@ -5,7 +5,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- author: Ansible Core Team (@ansible) module: import_role @@ -87,9 +87,9 @@ seealso: - module: ansible.builtin.include_tasks - ref: playbooks_reuse description: More information related to including and importing playbooks, roles and tasks. -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - hosts: all tasks: - ansible.builtin.import_role: @@ -110,8 +110,8 @@ EXAMPLES = r''' ansible.builtin.import_role: name: myrole when: not idontwanttorun -''' +""" -RETURN = r''' +RETURN = r""" # This module does not return anything except tasks to execute. 
-''' +""" diff --git a/lib/ansible/modules/import_tasks.py b/lib/ansible/modules/import_tasks.py index 4d60368dcb2..26ef9d90198 100644 --- a/lib/ansible/modules/import_tasks.py +++ b/lib/ansible/modules/import_tasks.py @@ -6,7 +6,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- author: Ansible Core Team (@ansible) module: import_tasks @@ -46,9 +46,9 @@ seealso: - module: ansible.builtin.include_tasks - ref: playbooks_reuse description: More information related to including and importing playbooks, roles and tasks. -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - hosts: all tasks: - ansible.builtin.debug: @@ -69,8 +69,8 @@ EXAMPLES = r''' - name: Apply conditional to all imported tasks ansible.builtin.import_tasks: stuff.yaml when: hostvar is defined -''' +""" -RETURN = r''' +RETURN = r""" # This module does not return anything except tasks to execute. -''' +""" diff --git a/lib/ansible/modules/include_role.py b/lib/ansible/modules/include_role.py index 9fa07034df6..e800c5e61c9 100644 --- a/lib/ansible/modules/include_role.py +++ b/lib/ansible/modules/include_role.py @@ -6,7 +6,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- author: Ansible Core Team (@ansible) module: include_role @@ -92,9 +92,9 @@ seealso: - module: ansible.builtin.include_tasks - ref: playbooks_reuse description: More information related to including and importing playbooks, roles and tasks. -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - ansible.builtin.include_role: name: myrole @@ -131,8 +131,8 @@ EXAMPLES = r''' - install tags: - always -''' +""" -RETURN = r''' +RETURN = r""" # This module does not return anything except tasks to execute. 
-''' +""" diff --git a/lib/ansible/modules/include_tasks.py b/lib/ansible/modules/include_tasks.py index 82fb5865121..d2657960d98 100644 --- a/lib/ansible/modules/include_tasks.py +++ b/lib/ansible/modules/include_tasks.py @@ -6,7 +6,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- author: Ansible Core Team (@ansible) module: include_tasks @@ -50,9 +50,9 @@ seealso: - module: ansible.builtin.include_role - ref: playbooks_reuse description: More information related to including and importing playbooks, roles and tasks. -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - hosts: all tasks: - ansible.builtin.debug: @@ -91,8 +91,8 @@ EXAMPLES = r''' - install tags: - always -''' +""" -RETURN = r''' +RETURN = r""" # This module does not return anything except tasks to execute. -''' +""" diff --git a/lib/ansible/modules/include_vars.py b/lib/ansible/modules/include_vars.py index 9238682dead..b2e3c44e386 100644 --- a/lib/ansible/modules/include_vars.py +++ b/lib/ansible/modules/include_vars.py @@ -5,7 +5,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- author: Allen Sanabria (@linuxdynasty) module: include_vars @@ -112,9 +112,9 @@ seealso: - module: ansible.builtin.set_fact - ref: playbooks_delegation description: More information related to task delegation. -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - name: Include vars of stuff.yaml into the 'stuff' variable (2.2). 
ansible.builtin.include_vars: file: stuff.yaml @@ -179,9 +179,9 @@ EXAMPLES = r''' - 'yaml' - 'yml' - 'json' -''' +""" -RETURN = r''' +RETURN = r""" ansible_facts: description: Variables that were included and their values returned: success @@ -193,4 +193,4 @@ ansible_included_var_files: type: list sample: [ /path/to/file.json, /path/to/file.yaml ] version_added: '2.4' -''' +""" diff --git a/lib/ansible/modules/iptables.py b/lib/ansible/modules/iptables.py index 9976d805c8d..164b53960b0 100644 --- a/lib/ansible/modules/iptables.py +++ b/lib/ansible/modules/iptables.py @@ -7,7 +7,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: iptables short_description: Modify iptables rules @@ -394,9 +394,9 @@ options: type: bool default: false version_added: "2.15" -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - name: Block specific IP ansible.builtin.iptables: chain: INPUT @@ -543,7 +543,7 @@ EXAMPLES = r''' - "443" - "8081:8083" jump: ACCEPT -''' +""" import re @@ -848,6 +848,7 @@ def main(): required_if=[ ['jump', 'TEE', ['gateway']], ['jump', 'tee', ['gateway']], + ['flush', False, ['chain']], ] ) args = dict( @@ -865,10 +866,6 @@ def main(): ip_version = module.params['ip_version'] iptables_path = module.get_bin_path(BINS[ip_version], True) - # Check if chain option is required - if args['flush'] is False and args['chain'] is None: - module.fail_json(msg="Either chain or flush parameter must be specified.") - if module.params.get('log_prefix', None) or module.params.get('log_level', None): if module.params['jump'] is None: module.params['jump'] = 'LOG' diff --git a/lib/ansible/modules/known_hosts.py b/lib/ansible/modules/known_hosts.py index 8235258c664..c001915115d 100644 --- a/lib/ansible/modules/known_hosts.py +++ b/lib/ansible/modules/known_hosts.py @@ -5,12 +5,12 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: known_hosts short_description: Add or remove a host from the 
C(known_hosts) file description: - - The M(ansible.builtin.known_hosts) module lets you add or remove a host keys from the C(known_hosts) file. + - The M(ansible.builtin.known_hosts) module lets you add or remove host keys from the C(known_hosts) file. - Starting at Ansible 2.2, multiple entries per host are allowed, but only one for each key type supported by ssh. This is useful if you're going to want to use the M(ansible.builtin.git) module over ssh, for example. - If you have a very large number of host keys to manage, you will find the M(ansible.builtin.template) module more useful. @@ -19,7 +19,7 @@ options: name: aliases: [ 'host' ] description: - - The host to add or remove (must match a host specified in key). It will be converted to lowercase so that ssh-keygen can find it. + - The host to add or remove (must match a host specified in key). It will be converted to lowercase so that C(ssh-keygen) can find it. - Must match with or present in key attribute. - For custom SSH port, O(name) needs to specify port as well. See example section. type: str @@ -49,8 +49,8 @@ options: version_added: "2.3" state: description: - - V(present) to add the host key. - - V(absent) to remove it. + - V(present) to add host keys. + - V(absent) to remove host keys. choices: [ "absent", "present" ] default: "present" type: str @@ -65,9 +65,9 @@ extends_documentation_fragment: - action_common_attributes author: - Matthew Vernon (@mcv21) -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" - name: Tell the host about our servers it might want to ssh to ansible.builtin.known_hosts: path: /etc/ssh/ssh_known_hosts @@ -87,7 +87,7 @@ EXAMPLES = r''' key: '[host1.example.com]:2222 ssh-rsa ASDeararAIUHI324324' # some key gibberish path: /etc/ssh/ssh_known_hosts state: present -''' +""" # Makes sure public host keys are present or absent in the given known_hosts # file. 
@@ -101,6 +101,7 @@ EXAMPLES = r''' # state = absent|present (default: present) import base64 +import copy import errno import hashlib import hmac @@ -118,6 +119,7 @@ def enforce_state(module, params): Add or remove key. """ + results = dict(changed=False) host = params["name"].lower() key = params.get("key", None) path = params.get("path") @@ -140,13 +142,12 @@ def enforce_state(module, params): found, replace_or_add, found_line = search_for_host_key(module, host, key, path, sshkeygen) - params['diff'] = compute_diff(path, found_line, replace_or_add, state, key) + results['diff'] = compute_diff(path, found_line, replace_or_add, state, key) # check if we are trying to remove a non matching key, # in that case return with no change to the host if state == 'absent' and not found_line and key: - params['changed'] = False - return params + return results # We will change state if found==True & state!="present" # or found==False & state=="present" @@ -154,15 +155,15 @@ def enforce_state(module, params): # Alternatively, if replace is true (i.e. key present, and we must change # it) if module.check_mode: - module.exit_json(changed=replace_or_add or (state == "present") != found, - diff=params['diff']) + results['changed'] = replace_or_add or (state == "present") != found + module.exit_json(**results) # Now do the work. 
# Only remove whole host if found and no key provided if found and not key and state == "absent": module.run_command([sshkeygen, '-R', host, '-f', path], check_rc=True) - params['changed'] = True + results['changed'] = True # Next, add a new (or replacing) entry if replace_or_add or found != (state == "present"): @@ -188,19 +189,19 @@ def enforce_state(module, params): else: module.atomic_move(outf.name, path) - params['changed'] = True + results['changed'] = True - return params + return results def sanity_check(module, host, key, sshkeygen): - '''Check supplied key is sensible + """Check supplied key is sensible host and key are parameters provided by the user; If the host provided is inconsistent with the key supplied, then this function quits, providing an error to the user. sshkeygen is the path to ssh-keygen, found earlier with get_bin_path - ''' + """ # If no key supplied, we're doing a removal, and have nothing to check here. if not key: return @@ -231,7 +232,7 @@ def sanity_check(module, host, key, sshkeygen): def search_for_host_key(module, host, key, path, sshkeygen): - '''search_for_host_key(module,host,key,path,sshkeygen) -> (found,replace_or_add,found_line) + """search_for_host_key(module,host,key,path,sshkeygen) -> (found,replace_or_add,found_line) Looks up host and keytype in the known_hosts file path; if it's there, looks to see if one of those entries matches key. Returns: @@ -240,7 +241,7 @@ def search_for_host_key(module, host, key, path, sshkeygen): found_line (int or None): the line where a key of the same type was found if found=False, then replace is always False. sshkeygen is the path to ssh-keygen, found earlier with get_bin_path - ''' + """ if os.path.exists(path) is False: return False, False, None @@ -303,14 +304,14 @@ def hash_host_key(host, key): def normalize_known_hosts_key(key): - ''' + """ Transform a key, either taken from a known_host file or provided by the user, into a normalized form. 
The host part (which might include multiple hostnames or be hashed) gets replaced by the provided host. Also, any spurious information gets removed from the end (like the username@host tag usually present in hostkeys, but absent in known_hosts files) - ''' + """ key = key.strip() # trim trailing newline k = key.split() d = dict() @@ -364,7 +365,9 @@ def main(): supports_check_mode=True ) - results = enforce_state(module, module.params) + # TODO: deprecate returning everything that was passed in + results = copy.copy(module.params) + results.update(enforce_state(module, module.params)) module.exit_json(**results) diff --git a/lib/ansible/modules/lineinfile.py b/lib/ansible/modules/lineinfile.py index 9e9fdd9b8ef..0ef882f4840 100644 --- a/lib/ansible/modules/lineinfile.py +++ b/lib/ansible/modules/lineinfile.py @@ -8,7 +8,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" --- module: lineinfile short_description: Manage lines in text files @@ -87,13 +87,11 @@ options: - If specified, the line will be inserted after the last match of specified regular expression. - If the first match is required, use(firstmatch=yes). - A special value is available; V(EOF) for inserting the line at the end of the file. - - If specified regular expression has no matches, EOF will be used instead. + - If specified regular expression has no matches or no value is passed, V(EOF) will be used instead. - If O(insertbefore) is set, default value V(EOF) will be ignored. - If regular expressions are passed to both O(regexp) and O(insertafter), O(insertafter) is only honored if no match for O(regexp) is found. - May not be used with O(backrefs) or O(insertbefore). type: str - choices: [ EOF, '*regex*' ] - default: EOF insertbefore: description: - Used with O(state=present). @@ -104,7 +102,6 @@ options: - If regular expressions are passed to both O(regexp) and O(insertbefore), O(insertbefore) is only honored if no match for O(regexp) is found. 
- May not be used with O(backrefs) or O(insertafter). type: str - choices: [ BOF, '*regex*' ] version_added: "1.1" create: description: @@ -155,9 +152,9 @@ author: - Daniel Hokka Zakrissoni (@dhozac) - Ahti Kitsik (@ahtik) - Jose Angel Munoz (@imjoseangel) -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" # NOTE: Before 2.3, option 'dest', 'destfile' or 'name' was used instead of 'path' - name: Ensure SELinux is set to enforcing mode ansible.builtin.lineinfile: @@ -240,9 +237,9 @@ EXAMPLES = r''' regexp: ^(host=).* line: \g<1>{{ hostname }} backrefs: yes -''' +""" -RETURN = r'''#''' +RETURN = r"""#""" import os import re diff --git a/lib/ansible/modules/meta.py b/lib/ansible/modules/meta.py index 0baea37d677..b10a56e2444 100644 --- a/lib/ansible/modules/meta.py +++ b/lib/ansible/modules/meta.py @@ -6,7 +6,7 @@ from __future__ import annotations -DOCUMENTATION = r''' +DOCUMENTATION = r""" module: meta short_description: Execute Ansible 'actions' version_added: '1.2' @@ -33,7 +33,12 @@ options: - V(end_host) (added in Ansible 2.8) is a per-host variation of V(end_play). Causes the play to end for the current host without failing it. - V(end_batch) (added in Ansible 2.12) causes the current batch (see C(serial)) to end without failing the host(s). Note that with C(serial=0) or undefined this behaves the same as V(end_play). - choices: [ clear_facts, clear_host_errors, end_host, end_play, flush_handlers, noop, refresh_inventory, reset_connection, end_batch ] + - V(end_role) (added in Ansible 2.18) causes the currently executing role to end without failing the host(s). + Effectively all tasks from within a role after V(end_role) is executed are ignored. Since handlers live in a global, + play scope, all handlers added via the role are unaffected and are still executed if notified. It is an error + to call V(end_role) from outside of a role or from a handler. 
Note that V(end_role) does not have an effect to + the parent roles or roles that depend (via dependencies in meta/main.yml) on a role executing V(end_role). + choices: [ clear_facts, clear_host_errors, end_host, end_play, flush_handlers, noop, refresh_inventory, reset_connection, end_batch, end_role ] required: true extends_documentation_fragment: - action_common_attributes @@ -73,9 +78,9 @@ seealso: - module: ansible.builtin.fail author: - Ansible Core Team -''' +""" -EXAMPLES = r''' +EXAMPLES = r""" # Example showing flushing handlers on demand, not at end of play - ansible.builtin.template: src: new.j2 @@ -121,4 +126,4 @@ EXAMPLES = r''' when: - ansible_distribution == 'CentOS' - ansible_distribution_major_version == '6' -''' +""" diff --git a/lib/ansible/modules/mount_facts.py b/lib/ansible/modules/mount_facts.py new file mode 100644 index 00000000000..f5d2bf47f3a --- /dev/null +++ b/lib/ansible/modules/mount_facts.py @@ -0,0 +1,651 @@ +# -*- coding: utf-8 -*- +# Copyright: Contributors to the Ansible project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import annotations + + +DOCUMENTATION = """ +--- +module: mount_facts +version_added: 2.18 +short_description: Retrieve mount information. +description: + - Retrieve information about mounts from preferred sources and filter the results based on the filesystem type and device. +options: + devices: + description: A list of fnmatch patterns to filter mounts by the special device or remote file system. + default: ~ + type: list + elements: str + fstypes: + description: A list of fnmatch patterns to filter mounts by the type of the file system. + default: ~ + type: list + elements: str + sources: + description: + - A list of sources used to determine the mounts. Missing file sources (or empty files) are skipped. Repeat sources, including symlinks, are skipped. 
+ - The C(mount_points) return value contains the first definition found for a mount point. + - Additional mounts to the same mount point are available from C(aggregate_mounts) (if enabled). + - By default, mounts are retrieved from all of the standard locations, which have the predefined aliases V(all)/V(static)/V(dynamic). + - V(all) contains V(dynamic) and V(static). + - V(dynamic) contains V(/etc/mtab), V(/proc/mounts), V(/etc/mnttab), and the value of O(mount_binary) if it is not None. + This allows platforms like BSD or AIX, which don't have an equivalent to V(/proc/mounts), to collect the current mounts by default. + See the O(mount_binary) option to disable the fall back or configure a different executable. + - V(static) contains V(/etc/fstab), V(/etc/vfstab), and V(/etc/filesystems). + Note that V(/etc/filesystems) is specific to AIX. The Linux file by this name has a different format/purpose and is ignored. + - The value of O(mount_binary) can be configured as a source, which will cause it to always execute. + Depending on the other sources configured, this could be inefficient/redundant. + For example, if V(/proc/mounts) and V(mount) are listed as O(sources), Linux hosts will retrieve the same mounts twice. + default: ~ + type: list + elements: str + mount_binary: + description: + - The O(mount_binary) is used if O(sources) contain the value "mount", or if O(sources) contains a dynamic + source, and none were found (as can be expected on BSD or AIX hosts). + - Set to V(null) to stop after no dynamic file source is found instead. + type: raw + default: mount + timeout: + description: + - This is the maximum number of seconds to wait for each mount to complete. When this is V(null), wait indefinitely. + - Configure in conjunction with O(on_timeout) to skip unresponsive mounts. + - This timeout also applies to the O(mount_binary) command to list mounts. 
+ - If the module is configured to run during the play's fact gathering stage, set a timeout using module_defaults to prevent a hang (see example). + type: float + on_timeout: + description: + - The action to take when gathering mount information exceeds O(timeout). + type: str + default: error + choices: + - error + - warn + - ignore + include_aggregate_mounts: + description: + - Whether or not the module should return the C(aggregate_mounts) list in C(ansible_facts). + - When this is V(null), a warning will be emitted if multiple mounts for the same mount point are found. + default: ~ + type: bool +extends_documentation_fragment: + - action_common_attributes +attributes: + check_mode: + support: full + diff_mode: + support: none + platform: + platforms: posix +author: + - Ansible Core Team + - Sloane Hertel (@s-hertel) +""" + +EXAMPLES = """ +- name: Get non-local devices + mount_facts: + devices: "[!/]*" + +- name: Get FUSE subtype mounts + mount_facts: + fstypes: + - "fuse.*" + +- name: Get NFS mounts during gather_facts with timeout + hosts: all + gather_facts: true + vars: + ansible_facts_modules: + - ansible.builtin.mount_facts + module_default: + ansible.builtin.mount_facts: + timeout: 10 + fstypes: + - nfs + - nfs4 + +- name: Get mounts from a non-default location + mount_facts: + sources: + - /usr/etc/fstab + +- name: Get mounts from the mount binary + mount_facts: + sources: + - mount + mount_binary: /sbin/mount +""" + +RETURN = """ +ansible_facts: + description: + - An ansible_facts dictionary containing a dictionary of C(mount_points) and list of C(aggregate_mounts) when enabled. + - Each key in C(mount_points) is a mount point, and the value contains mount information (similar to C(ansible_facts["mounts"])). + Each value also contains the key C(ansible_context), with details about the source and line(s) corresponding to the parsed mount point. 
+ - When C(aggregate_mounts) are included, the containing dictionaries are the same format as the C(mount_point) values. + returned: on success + type: dict + sample: + mount_points: + /proc/sys/fs/binfmt_misc: + ansible_context: + source: /proc/mounts + source_data: "systemd-1 /proc/sys/fs/binfmt_misc autofs rw,relatime,fd=33,pgrp=1,timeout=0,minproto=5,maxproto=5,direct,pipe_ino=33850 0 0" + block_available: 0 + block_size: 4096 + block_total: 0 + block_used: 0 + device: "systemd-1" + dump: 0 + fstype: "autofs" + inode_available: 0 + inode_total: 0 + inode_used: 0 + mount: "/proc/sys/fs/binfmt_misc" + options: "rw,relatime,fd=33,pgrp=1,timeout=0,minproto=5,maxproto=5,direct,pipe_ino=33850" + passno: 0 + size_available: 0 + size_total: 0 + uuid: null + aggregate_mounts: + - ansible_context: + source: /proc/mounts + source_data: "systemd-1 /proc/sys/fs/binfmt_misc autofs rw,relatime,fd=33,pgrp=1,timeout=0,minproto=5,maxproto=5,direct,pipe_ino=33850 0 0" + block_available: 0 + block_size: 4096 + block_total: 0 + block_used: 0 + device: "systemd-1" + dump: 0 + fstype: "autofs" + inode_available: 0 + inode_total: 0 + inode_used: 0 + mount: "/proc/sys/fs/binfmt_misc" + options: "rw,relatime,fd=33,pgrp=1,timeout=0,minproto=5,maxproto=5,direct,pipe_ino=33850" + passno: 0 + size_available: 0 + size_total: 0 + uuid: null + - ansible_context: + source: /proc/mounts + source_data: "binfmt_misc /proc/sys/fs/binfmt_misc binfmt_misc rw,nosuid,nodev,noexec,relatime 0 0" + block_available: 0 + block_size: 4096 + block_total: 0 + block_used: 0 + device: binfmt_misc + dump: 0 + fstype: binfmt_misc + inode_available: 0 + inode_total: 0 + inode_used: 0 + mount: "/proc/sys/fs/binfmt_misc" + options: "rw,nosuid,nodev,noexec,relatime" + passno: 0 + size_available: 0 + size_total: 0 + uuid: null +""" + +from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.facts import timeout as _timeout +from ansible.module_utils.facts.utils import get_mount_size, 
get_file_content + +from contextlib import suppress +from dataclasses import astuple, dataclass +from fnmatch import fnmatch + +import codecs +import datetime +import functools +import os +import re +import subprocess +import typing as t + +STATIC_SOURCES = ["/etc/fstab", "/etc/vfstab", "/etc/filesystems"] +DYNAMIC_SOURCES = ["/etc/mtab", "/proc/mounts", "/etc/mnttab"] + +# AIX and BSD don't have a file-based dynamic source, so the module also supports running a mount binary to collect these. +# Pattern for Linux, including OpenBSD and NetBSD +LINUX_MOUNT_RE = re.compile(r"^(?P\S+) on (?P\S+) type (?P\S+) \((?P.+)\)$") +# Pattern for other BSD including FreeBSD, DragonFlyBSD, and MacOS +BSD_MOUNT_RE = re.compile(r"^(?P\S+) on (?P\S+) \((?P.+)\)$") +# Pattern for AIX, example in https://www.ibm.com/docs/en/aix/7.2?topic=m-mount-command +AIX_MOUNT_RE = re.compile(r"^(?P\S*)\s+(?P\S+)\s+(?P\S+)\s+(?P\S+)\s+(?P