Mass update of test/ directory from devel

pull/15204/head
James Cammarata 9 years ago
parent 43cd33888b
commit 6b1648f708

@ -1,3 +1,4 @@
TEST_DIR ?= ~/ansible_testing
INVENTORY ?= inventory
VARS_FILE ?= integration_config.yml
@ -20,38 +21,87 @@ MYTMPDIR = $(shell mktemp -d 2>/dev/null || mktemp -d -t 'mytmpdir')
VAULT_PASSWORD_FILE = vault-password
CONSUL_RUNNING := $(shell python consul_running.py)
EUID := $(shell id -u -r)
all: setup test_test_infra parsing test_var_precedence unicode test_templating_settings environment non_destructive destructive includes blocks pull check_mode test_hash test_handlers test_group_by test_vault test_tags test_lookup_paths no_log test_connection test_gathering_facts
test_test_infra:
# ensure fail/assert work locally and can stop execution with non-zero exit code
PB_OUT=$$(ansible-playbook -i inventory.local test_test_infra.yml) ; APB_RC=$$? ; echo "$$PB_OUT" ; echo "rc was $$APB_RC (must be non-zero)" ; [ $$APB_RC -ne 0 ] ; echo "ensure playbook output shows assert/fail works (True)" ; echo "$$PB_OUT" | grep "fail works (True)" || exit 1 ; echo "$$PB_OUT" | fgrep "assert works (True)" || exit 1
# ensure we work using all specified test args, overridden inventory, etc
PB_OUT=$$(ansible-playbook -i $(INVENTORY) test_test_infra.yml -e @$(VARS_FILE) $(CREDENTIALS_ARG) $(TEST_FLAGS)) ; APB_RC=$$? ; echo "$$PB_OUT" ; echo "rc was $$APB_RC (must be non-zero)" ; [ $$APB_RC -ne 0 ] ; echo "ensure playbook output shows assert/fail works (True)" ; echo "$$PB_OUT" | grep "fail works (True)" || exit 1 ; echo "$$PB_OUT" | fgrep "assert works (True)" || exit 1
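For readers unfamiliar with the new target, a minimal sketch of a playbook that would satisfy the two greps above (the committed test_test_infra.yml may differ; the host comes from inventory.local, everything else here is an assumption):

- hosts: testhost
  connection: local
  gather_facts: no
  tasks:
    # must print "assert works (True)" for the fgrep check
    - assert:
        that:
          - true
      register: assert_out
    - debug: msg="assert works ({{ not assert_out|failed }})"
    # must print "fail works (True)" and then abort, producing the non-zero rc
    - debug: msg="fail works (True)"
    - fail: msg="deliberate failure to prove fail stops execution"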
setup:
rm -rf $(TEST_DIR)
mkdir -p $(TEST_DIR)
parsing: setup
ansible-playbook bad_parsing.yml -i $(INVENTORY) -e outputdir=$(TEST_DIR) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario5
ansible-playbook good_parsing.yml -i $(INVENTORY) -e outputdir=$(TEST_DIR) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS)
includes: setup
ansible-playbook test_includes.yml -i $(INVENTORY) -e outputdir=$(TEST_DIR) -e @$(VARS_FILE) $(CREDENTIALS_ARG) $(TEST_FLAGS)
pull: pull_run pull_no_127 pull_limit_inventory
pull_run:
ansible-pull -d $(MYTMPDIR) -U https://github.com/ansible-test-robinro/pull-integration-test.git | grep MAGICKEYWORD; \
RC=$$? ; \
rm -rf $(MYTMPDIR); \
exit $$RC
# test for https://github.com/ansible/ansible/issues/13681
pull_no_127:
ansible-pull -d $(MYTMPDIR) -U https://github.com/ansible-test-robinro/pull-integration-test.git | grep -v 127\.0\.0\.1; \
RC=$$? ; \
rm -rf $(MYTMPDIR); \
exit $$RC
# test for https://github.com/ansible/ansible/issues/13688
pull_limit_inventory:
ansible-pull -d $(MYTMPDIR) -U https://github.com/ansible-test-robinro/pull-integration-test.git; \
RC=$$? ; \
rm -rf $(MYTMPDIR); \
exit $$RC
unicode: setup
ansible-playbook unicode.yml -i $(INVENTORY) -e outputdir=$(TEST_DIR) -e @$(VARS_FILE) -v $(TEST_FLAGS) -e 'extra_var=café'
# Test the start-at-task flag #9571
ansible-playbook unicode.yml -i $(INVENTORY) -e outputdir=$(TEST_DIR) -e @$(VARS_FILE) -v --start-at-task '*¶' -e 'start_at_task=True' $(TEST_FLAGS)
test_templating_settings: setup
ansible-playbook test_templating_settings.yml -i $(INVENTORY) -e outputdir=$(TEST_DIR) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS)
test_gathering_facts: setup
ansible-playbook test_gathering_facts.yml -i $(INVENTORY) -e outputdir=$(TEST_DIR) -e @$(VARS_FILE) -v $(TEST_FLAGS)
environment: setup
ansible-playbook test_environment.yml -i $(INVENTORY) -e outputdir=$(TEST_DIR) -e @$(VARS_FILE) $(CREDENTIALS_ARG) $(TEST_FLAGS)
non_destructive: setup
ansible-playbook non_destructive.yml -i $(INVENTORY) -e outputdir=$(TEST_DIR) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS)
# Skip connection plugins which require root when not running as root.
ifneq ($(EUID),0)
TEST_CONNECTION_FILTER := !chroot
endif
# Connection plugin test command to repeat with each locale setting.
TEST_CONNECTION_CMD = $(1) ansible-playbook test_connection.yml -i test_connection.inventory -l '!skip-during-build $(TEST_CONNECTION_FILTER)' $(TEST_FLAGS)
test_connection: setup
$(call TEST_CONNECTION_CMD)
$(call TEST_CONNECTION_CMD, LC_ALL=C LANG=C)
destructive: setup
ansible-playbook destructive.yml -i $(INVENTORY) -e outputdir=$(TEST_DIR) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS)
check_mode: setup
ansible-playbook check_mode.yml -i $(INVENTORY) -e outputdir=$(TEST_DIR) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v --check $(TEST_FLAGS)
test_group_by: setup
ansible-playbook test_group_by.yml -i inventory.group_by -e outputdir=$(TEST_DIR) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS)
test_handlers:
ansible-playbook test_handlers.yml --tags scenario1 -i inventory.handlers -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS)
@ -75,14 +125,14 @@ test_hash:
ANSIBLE_HASH_BEHAVIOUR=replace ansible-playbook test_hash.yml -i $(INVENTORY) $(CREDENTIALS_ARG) -v -e '{"test_hash":{"extra_args":"this is an extra arg"}}'
ANSIBLE_HASH_BEHAVIOUR=merge ansible-playbook test_hash.yml -i $(INVENTORY) $(CREDENTIALS_ARG) -v -e '{"test_hash":{"extra_args":"this is an extra arg"}}'
test_var_precedence: setup
ansible-playbook test_var_precedence.yml -i $(INVENTORY) $(CREDENTIALS_ARG) $(TEST_FLAGS) -v -e outputdir=$(TEST_DIR) -e 'extra_var=extra_var' -e 'extra_var_override=extra_var_override'
test_vault: setup
ansible-playbook test_vault.yml -i $(INVENTORY) $(CREDENTIALS_ARG) -v $(TEST_FLAGS) --vault-password-file $(VAULT_PASSWORD_FILE) --list-tasks -e outputdir=$(TEST_DIR)
ansible-playbook test_vault.yml -i $(INVENTORY) $(CREDENTIALS_ARG) -v $(TEST_FLAGS) --vault-password-file $(VAULT_PASSWORD_FILE) --list-hosts -e outputdir=$(TEST_DIR)
ansible-playbook test_vault.yml -i $(INVENTORY) $(CREDENTIALS_ARG) -v $(TEST_FLAGS) --vault-password-file $(VAULT_PASSWORD_FILE) --syntax-check -e outputdir=$(TEST_DIR)
ansible-playbook test_vault.yml -i $(INVENTORY) $(CREDENTIALS_ARG) -v $(TEST_FLAGS) --vault-password-file $(VAULT_PASSWORD_FILE) -e outputdir=$(TEST_DIR)
# test_delegate_to does not work unless we have permission to ssh to localhost.
# Would take some more effort on our test systems to implement that -- probably
@ -90,31 +140,31 @@ test_vault:
# root user on a node to ssh to itself. Until then, this is not in make all.
# Have to run it manually. Ordinary users should be able to run this test as
# long as they have permissions to login to their local machine via ssh.
test_delegate_to: setup
ansible-playbook test_delegate_to.yml -i $(INVENTORY) -e outputdir=$(TEST_DIR) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS)
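If you do run it by hand, one way to grant your own user that ssh-to-localhost permission is an authorized_key task along these lines (an illustration only, not part of this commit; the public key path is an assumption):

- name: allow the current user to ssh to its own machine
  authorized_key:
    user: "{{ ansible_user_id }}"
    key: "{{ lookup('file', lookup('env', 'HOME') + '/.ssh/id_rsa.pub') }}"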
test_winrm: setup
ansible-playbook test_winrm.yml -i inventory.winrm -e outputdir=$(TEST_DIR) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS)
test_tags: setup
# Run everything by default
[ "$$(ansible-playbook --list-tasks test_tags.yml -i $(INVENTORY) -e outputdir=$(TEST_DIR) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS) | fgrep Task_with | xargs)" = "Task_with_tag TAGS: [tag] Task_with_always_tag TAGS: [always] Task_without_tag TAGS: []" ]
# Run the exact tags, and always
[ "$$(ansible-playbook --list-tasks --tags tag test_tags.yml -i $(INVENTORY) -e outputdir=$(TEST_DIR) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS) | fgrep Task_with | xargs)" = "Task_with_tag TAGS: [tag] Task_with_always_tag TAGS: [always]" ]
# Skip one tag
[ "$$(ansible-playbook --list-tasks --skip-tags tag test_tags.yml -i $(INVENTORY) -e outputdir=$(TEST_DIR) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS) | fgrep Task_with | xargs)" = "Task_with_always_tag TAGS: [always] Task_without_tag TAGS: []" ]
blocks: setup
# remove old output log
rm -f block_test.out
# run the test and check to make sure the right number of completions was logged
ansible-playbook -vv -e outputdir=$(TEST_DIR) test_blocks/main.yml | tee block_test.out
[ "$$(grep 'TEST COMPLETE' block_test.out | wc -l)" = "$$(egrep '^[0-9]+ plays in' block_test.out | cut -f1 -d' ')" ]
# cleanup the output log again, to make sure the test is clean
rm -f block_test.out
# run test with free strategy and again count the completions
ansible-playbook -vv -e outputdir=$(TEST_DIR) test_blocks/main.yml -e test_strategy=free | tee block_test.out
[ "$$(grep 'TEST COMPLETE' block_test.out | wc -l)" = "$$(egrep '^[0-9]+ plays in' block_test.out | cut -f1 -d' ')" ]
cloud: amazon rackspace azure
@ -181,25 +231,25 @@ endif
test_galaxy: test_galaxy_spec test_galaxy_yaml test_galaxy_git
test_galaxy_spec: setup
mytmpdir=$(MYTMPDIR) ; \
ansible-galaxy install -r galaxy_rolesfile -p $$mytmpdir/roles -vvvv ; \
cp galaxy_playbook.yml $$mytmpdir ; \
ansible-playbook -i $(INVENTORY) $$mytmpdir/galaxy_playbook.yml -e outputdir=$(TEST_DIR) -v $(TEST_FLAGS) ; \
RC=$$? ; \
rm -rf $$mytmpdir ; \
exit $$RC
test_galaxy_yaml: setup
mytmpdir=$(MYTMPDIR) ; \
ansible-galaxy install -r galaxy_roles.yml -p $$mytmpdir/roles -vvvv; \
cp galaxy_playbook.yml $$mytmpdir ; \
ansible-playbook -i $(INVENTORY) $$mytmpdir/galaxy_playbook.yml -e outputdir=$(TEST_DIR) -v $(TEST_FLAGS) ; \
RC=$$? ; \
rm -rf $$mytmpdir ; \
exit $$RC
test_galaxy_git: setup
mytmpdir=$(MYTMPDIR) ; \
ansible-galaxy install git+https://bitbucket.org/willthames/git-ansible-galaxy,v1.6 -p $$mytmpdir/roles -vvvv; \
cp galaxy_playbook_git.yml $$mytmpdir ; \
@ -208,9 +258,9 @@ test_galaxy_git:
rm -rf $$mytmpdir ; \
exit $$RC
test_lookup_paths: setup
ansible-playbook lookup_paths/play.yml -i $(INVENTORY) -e outputdir=$(TEST_DIR) -v $(TEST_FLAGS)
no_log: setup
# This test expects 7 loggable vars and 0 non loggable ones, if either mismatches it fails, run the ansible-playbook command to debug
[ "$$(ansible-playbook no_log_local.yml -i $(INVENTORY) -e outputdir=$(TEST_DIR) -vvvvv | awk --source 'BEGIN { logme = 0; nolog = 0; } /LOG_ME/ { logme += 1;} /DO_NOT_LOG/ { nolog += 1;} END { printf "%d/%d", logme, nolog; }')" = "6/0" ]

@ -22,3 +22,9 @@
- { role: test_cs_account, tags: test_cs_account }
- { role: test_cs_firewall, tags: test_cs_firewall }
- { role: test_cs_loadbalancer_rule, tags: test_cs_loadbalancer_rule }
- { role: test_cs_volume, tags: test_cs_volume }
- { role: test_cs_instance_facts, tags: test_cs_instance_facts }
- { role: test_cs_configuration, tags: test_cs_configuration }
- { role: test_cs_pod, tags: test_cs_pod }
- { role: test_cs_cluster, tags: test_cs_cluster }
- { role: test_cs_resourcelimit, tags: test_cs_resourcelimit }

@ -3,7 +3,7 @@
name: oracle_java7
- src: git+http://bitbucket.org/willthames/git-ansible-galaxy
version: pr-10620
- src: http://bitbucket.org/willthames/hg-ansible-galaxy
scm: hg

@ -1,7 +1,7 @@
# deliberate non-empty whitespace line to follow
git+https://bitbucket.org/willthames/git-ansible-galaxy,pr-10620
hg+https://bitbucket.org/willthames/hg-ansible-galaxy
https://bitbucket.org/willthames/http-ansible-galaxy/get/master.tar.gz,,http-role
# comment

@ -4,6 +4,9 @@ testhost2 ansible_ssh_host=127.0.0.1 ansible_connection=local
# For testing delegate_to
testhost3 ansible_ssh_host=127.0.0.3
testhost4 ansible_ssh_host=127.0.0.4
# For testing fact gathering
facthost[0:8] ansible_host=1270.0.0.1 ansible_connection=local
# the following inline declarations are accompanied
# by (preferred) group_vars/ and host_vars/ variables

@ -0,0 +1,2 @@
testhost ansible_connection=local

@ -17,16 +17,15 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#- name: clean out the test directory
# file: name={{output_dir|mandatory}} state=absent
# always_run: True
# tags:
# - prepare
# when: clean_working_dir|default("yes")|bool
#
#- name: create the test directory
# file: name={{output_dir}} state=directory
# always_run: True
# tags:
# - prepare

@ -28,17 +28,17 @@
- name: install mysqldb_test rpm dependencies
yum: name={{ item }} state=latest
with_items: "{{mysql_packages}}"
when: ansible_pkg_mgr == 'yum'
- name: install mysqldb_test rpm dependencies
dnf: name={{ item }} state=latest
with_items: "{{mysql_packages}}"
when: ansible_pkg_mgr == 'dnf'
- name: install mysqldb_test debian dependencies
apt: name={{ item }} state=latest
with_items: "{{mysql_packages}}"
when: ansible_pkg_mgr == 'apt'
- name: start mysql_db service if not running
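A note on this recurring edit: Ansible 2.x deprecates bare variable names in with_items and the other loop keywords, so this commit consistently rewrites them as explicit Jinja2 expressions, for example:

# deprecated bare-variable form (emits a warning on Ansible 2.x)
with_items: mysql_packages
# explicit template form used throughout this commit
with_items: "{{ mysql_packages }}"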

@ -10,12 +10,12 @@
# Make sure we start fresh
- name: remove rpm dependencies for postgresql test
package: name={{ item }} state=absent
with_items: "{{postgresql_packages}}"
when: ansible_os_family == "RedHat"
- name: remove dpkg dependencies for postgresql test
apt: name={{ item }} state=absent
with_items: "{{postgresql_packages}}"
when: ansible_pkg_mgr == 'apt'
- name: remove old db (red hat)
@ -36,12 +36,12 @@
- name: install rpm dependencies for postgresql test
package: name={{ item }} state=latest
with_items: "{{postgresql_packages}}"
when: ansible_os_family == "RedHat"
- name: install dpkg dependencies for postgresql test
apt: name={{ item }} state=latest
with_items: "{{postgresql_packages}}"
when: ansible_pkg_mgr == 'apt'
- name: Initialize postgres (systemd)

@ -2,6 +2,7 @@
- set_fact:
test_ppa_name: 'ppa:menulibre-dev/devel'
test_ppa_filename: 'menulibre-dev'
test_ppa_spec: 'deb http://ppa.launchpad.net/menulibre-dev/devel/ubuntu {{ansible_distribution_release}} main'
test_ppa_key: 'A7AD98A1' # http://keyserver.ubuntu.com:11371/pks/lookup?search=0xD06AAF4C11DAB86DF421421EFE6B20ECA7AD98A1&op=index
@ -144,6 +145,47 @@
- name: 'ensure ppa key is absent (expect: pass)'
apt_key: id='{{test_ppa_key}}' state=absent
#
# TEST: apt_repository: repo=<spec> filename=<filename>
#
- include: 'cleanup.yml'
- name: 'record apt cache mtime'
stat: path='/var/cache/apt/pkgcache.bin'
register: cache_before
- name: 'name=<spec> filename=<filename> (expect: pass)'
apt_repository: repo='{{test_ppa_spec}}' filename='{{test_ppa_filename}}' state=present
register: result
- assert:
that:
- 'result.changed'
- 'result.state == "present"'
- 'result.repo == "{{test_ppa_spec}}"'
- name: 'examine source file'
stat: path='/etc/apt/sources.list.d/{{test_ppa_filename}}.list'
register: source_file
- name: 'assert source file exists'
assert:
that:
- 'source_file.stat.exists == True'
- name: 'examine apt cache mtime'
stat: path='/var/cache/apt/pkgcache.bin'
register: cache_after
- name: 'assert the apt cache did change'
assert:
that:
- 'cache_before.stat.mtime != cache_after.stat.mtime'
# When installing a repo with the spec, the key is *NOT* added
- name: 'ensure ppa key is absent (expect: pass)'
apt_key: id='{{test_ppa_key}}' state=absent
#
# TEARDOWN
#

@ -117,6 +117,10 @@
register: recursive_copy_result
- debug: var=recursive_copy_result
- name: assert that the recursive copy did something
assert:
that:
- "recursive_copy_result|changed"
- name: check that a file in a directory was transferred
stat: path={{output_dir}}/sub/subdir/bar.txt
@ -150,7 +154,7 @@
assert:
that:
- "{{item.stat.mode}} == 0700"
with_items: "{{dir_stats.results}}"
# errors on this aren't presently ignored so this test is commented out. But it would be nice to fix.

@ -0,0 +1,3 @@
---
dependencies:
- test_cs_common

@ -0,0 +1,211 @@
---
- name: setup cluster is absent
cs_cluster:
name: "{{ cs_resource_prefix }}-cluster"
state: absent
register: cluster
- name: verify setup cluster is absent
assert:
that:
- cluster|success
- name: setup zone is present
cs_zone:
name: "{{ cs_resource_prefix }}-zone"
dns1: 8.8.8.8
dns2: 8.8.4.4
network_type: basic
register: zone
- name: verify setup zone is present
assert:
that:
- zone|success
- name: setup pod is present
cs_pod:
name: "{{ cs_resource_prefix }}-pod"
zone: "{{ cs_resource_prefix }}-zone"
start_ip: 10.100.10.101
gateway: 10.100.10.1
netmask: 255.255.255.0
register: pod
- name: verify setup pod is present
assert:
that:
- pod|success
- name: test fail if missing name
cs_cluster:
register: cluster
ignore_errors: true
- name: verify results of fail if missing name
assert:
that:
- cluster|failed
- "cluster.msg == 'missing required arguments: name'"
- name: test create cluster
cs_cluster:
name: "{{ cs_resource_prefix }}-cluster"
zone: "{{ cs_resource_prefix }}-zone"
hypervisor: simulator
cluster_type: CloudManaged
register: cluster_origin
tags: disable
- name: verify test create cluster
assert:
that:
- cluster_origin|changed
- cluster_origin.name == "{{ cs_resource_prefix }}-cluster"
- cluster_origin.zone == "{{ cs_resource_prefix }}-zone"
- cluster_origin.allocation_state == "Enabled"
- cluster_origin.hypervisor == "Simulator"
- cluster_origin.cluster_type == "CloudManaged"
- name: test create cluster idempotence
cs_cluster:
name: "{{ cs_resource_prefix }}-Cluster"
zone: "{{ cs_resource_prefix }}-Zone"
hypervisor: Simulator
cluster_type: CloudManaged
register: cluster
- name: verify test create cluster idempotence
assert:
that:
- cluster.id == cluster_origin.id
- not cluster|changed
- cluster.name == "{{ cs_resource_prefix }}-cluster"
- cluster.zone == "{{ cs_resource_prefix }}-zone"
- cluster.allocation_state == "Enabled"
- cluster_origin.hypervisor == "Simulator"
- cluster.cluster_type == "CloudManaged"
- name: test update cluster
cs_cluster:
name: "{{ cs_resource_prefix }}-cluster"
zone: "{{ cs_resource_prefix }}-zone"
hypervisor: simulator
cluster_type: ExternalManaged
register: cluster
- name: verify test update cluster
assert:
that:
- cluster|changed
- cluster.name == "{{ cs_resource_prefix }}-cluster"
- cluster.zone == "{{ cs_resource_prefix }}-zone"
- cluster.allocation_state == "Enabled"
- cluster.hypervisor == "Simulator"
- cluster.cluster_type == "ExternalManaged"
- cluster.id == cluster_origin.id
- name: test update cluster idempotence
cs_cluster:
name: "{{ cs_resource_prefix }}-cluster"
zone: "{{ cs_resource_prefix }}-zone"
hypervisor: simulator
cluster_type: ExternalManaged
register: cluster
- name: verify test update cluster idempotence
assert:
that:
- not cluster|changed
- cluster.name == "{{ cs_resource_prefix }}-cluster"
- cluster.zone == "{{ cs_resource_prefix }}-zone"
- cluster.allocation_state == "Enabled"
- cluster.hypervisor == "Simulator"
- cluster.cluster_type == "ExternalManaged"
- cluster.id == cluster_origin.id
- name: test disable cluster
cs_cluster:
name: "{{ cs_resource_prefix }}-cluster"
state: disabled
register: cluster
tags: disable
- name: verify test disable cluster
assert:
that:
- cluster|changed
- cluster.name == "{{ cs_resource_prefix }}-cluster"
- cluster.zone == "{{ cs_resource_prefix }}-zone"
- cluster.allocation_state == "Disabled"
- cluster.hypervisor == "Simulator"
- cluster.cluster_type == "ExternalManaged"
- cluster.id == cluster_origin.id
tags: disable
- name: test disable cluster idempotence
cs_cluster:
name: "{{ cs_resource_prefix }}-cluster"
state: disabled
register: cluster
tags: disable
- name: verify test disable cluster idempotence
assert:
that:
- not cluster|changed
- cluster.name == "{{ cs_resource_prefix }}-cluster"
- cluster.zone == "{{ cs_resource_prefix }}-zone"
- cluster.allocation_state == "Disabled"
- cluster.hypervisor == "Simulator"
- cluster.cluster_type == "ExternalManaged"
tags: disable
- name: test enable cluster
cs_cluster:
name: "{{ cs_resource_prefix }}-cluster"
state: enabled
register: cluster
- name: verify test enable cluster
assert:
that:
- cluster|changed
- cluster.name == "{{ cs_resource_prefix }}-cluster"
- cluster.zone == "{{ cs_resource_prefix }}-zone"
- cluster.allocation_state == "Enabled"
- cluster.hypervisor == "Simulator"
- cluster.cluster_type == "ExternalManaged"
- cluster.id == cluster_origin.id
- name: test enable cluster idempotence
cs_cluster:
name: "{{ cs_resource_prefix }}-cluster"
state: enabled
register: cluster
- name: verify test enable cluster idempotence
assert:
that:
- not cluster|changed
- cluster.name == "{{ cs_resource_prefix }}-cluster"
- cluster.zone == "{{ cs_resource_prefix }}-zone"
- cluster.allocation_state == "Enabled"
- cluster.hypervisor == "Simulator"
- cluster.cluster_type == "ExternalManaged"
- cluster.id == cluster_origin.id
- name: test remove cluster
cs_cluster:
name: "{{ cs_resource_prefix }}-cluster"
zone: "{{ cs_resource_prefix }}-zone"
state: absent
register: cluster
- name: verify test remove cluster
assert:
that:
- cluster.id == cluster_origin.id
- cluster|changed
- cluster.name == "{{ cs_resource_prefix }}-cluster"
- cluster.zone == "{{ cs_resource_prefix }}-zone"
- cluster.allocation_state == "Enabled"
- cluster_origin.hypervisor == "Simulator"
- name: test remove cluster idempotence
cs_cluster:
name: "{{ cs_resource_prefix }}-cluster"
zone: "{{ cs_resource_prefix }}-zone"
state: absent
register: cluster
- name: verify test remove cluster idempotence
assert:
that:
- not cluster|changed

@ -0,0 +1,5 @@
---
test_cs_configuration_storage: PS0
test_cs_configuration_cluster: C0
test_cs_configuration_account: admin
test_cs_configuration_zone: Sandbox-simulator

@ -0,0 +1,3 @@
---
dependencies:
- test_cs_common

@ -0,0 +1,59 @@
---
- name: test configuration account
cs_configuration:
name: allow.public.user.templates
account: "{{ test_cs_configuration_account }}"
value: true
register: config
- name: verify test configuration storage
assert:
that:
- config|success
- name: test update configuration account
cs_configuration:
name: allow.public.user.templates
account: "{{ test_cs_configuration_account }}"
value: false
register: config
- name: verify update configuration account
assert:
that:
- config|success
- config|changed
- config.value == "false"
- config.name == "allow.public.user.templates"
- config.scope == "account"
- config.account == "{{ test_cs_configuration_account }}"
- name: test update configuration account idempotence
cs_configuration:
name: allow.public.user.templates
account: "{{ test_cs_configuration_account }}"
value: false
register: config
- name: verify update configuration account idempotence
assert:
that:
- config|success
- not config|changed
- config.value == "false"
- config.name == "allow.public.user.templates"
- config.scope == "account"
- config.account == "{{ test_cs_configuration_account }}"
- name: test reset configuration account
cs_configuration:
name: allow.public.user.templates
account: "{{ test_cs_configuration_account }}"
value: true
register: config
- name: verify update configuration account
assert:
that:
- config|success
- config|changed
- config.value == "true"
- config.name == "allow.public.user.templates"
- config.scope == "account"
- config.account == "{{ test_cs_configuration_account }}"

@ -0,0 +1,59 @@
---
- name: test configuration cluster
cs_configuration:
name: cpu.overprovisioning.factor
cluster: "{{ test_cs_configuration_cluster }}"
value: 1.0
register: config
- name: verify test configuration cluster
assert:
that:
- config|success
- name: test update configuration cluster
cs_configuration:
name: cpu.overprovisioning.factor
cluster: "{{ test_cs_configuration_cluster }}"
value: 2.0
register: config
- name: verify update configuration cluster
assert:
that:
- config|success
- config|changed
- config.value == "2.0"
- config.name == "cpu.overprovisioning.factor"
- config.scope == "cluster"
- config.cluster == "{{ test_cs_configuration_cluster }}"
- name: test update configuration cluster idempotence
cs_configuration:
name: cpu.overprovisioning.factor
cluster: "{{ test_cs_configuration_cluster }}"
value: 2.0
register: config
- name: verify update configuration cluster idempotence
assert:
that:
- config|success
- not config|changed
- config.value == "2.0"
- config.name == "cpu.overprovisioning.factor"
- config.scope == "cluster"
- config.cluster == "{{ test_cs_configuration_cluster }}"
- name: test reset configuration cluster
cs_configuration:
name: cpu.overprovisioning.factor
cluster: "{{ test_cs_configuration_cluster }}"
value: 1.0
register: config
- name: verify reset configuration cluster
assert:
that:
- config|success
- config|changed
- config.value == "1.0"
- config.name == "cpu.overprovisioning.factor"
- config.scope == "cluster"
- config.cluster == "{{ test_cs_configuration_cluster }}"

@ -0,0 +1,162 @@
---
- name: test fail if missing name
cs_configuration:
register: config
ignore_errors: true
- name: verify results of fail if missing arguments
assert:
that:
- config|failed
- "config.msg == 'missing required arguments: value,name'"
- name: test configuration
cs_configuration:
name: network.loadbalancer.haproxy.stats.visibility
value: global
register: config
- name: verify test configuration
assert:
that:
- config|success
- name: test update configuration string
cs_configuration:
name: network.loadbalancer.haproxy.stats.visibility
value: all
register: config
- name: verify test update configuration string
assert:
that:
- config|success
- config|changed
- config.value == "all"
- config.name == "network.loadbalancer.haproxy.stats.visibility"
- name: test update configuration string idempotence
cs_configuration:
name: network.loadbalancer.haproxy.stats.visibility
value: all
register: config
- name: verify test update configuration string idempotence
assert:
that:
- config|success
- not config|changed
- config.value == "all"
- config.name == "network.loadbalancer.haproxy.stats.visibility"
- name: test reset configuration string
cs_configuration:
name: network.loadbalancer.haproxy.stats.visibility
value: global
register: config
- name: verify test reset configuration string
assert:
that:
- config|success
- config|changed
- config.value == "global"
- config.name == "network.loadbalancer.haproxy.stats.visibility"
- name: test configuration
cs_configuration:
name: vmware.recycle.hung.wokervm
value: false
register: config
- name: verify test configuration
assert:
that:
- config|success
- name: test update configuration bool
cs_configuration:
name: vmware.recycle.hung.wokervm
value: true
register: config
- name: verify test update configuration bool
assert:
that:
- config|success
- config|changed
- config.value == "true"
- config.name == "vmware.recycle.hung.wokervm"
- name: test update configuration bool idempotence
cs_configuration:
name: vmware.recycle.hung.wokervm
value: true
register: config
- name: verify test update configuration bool idempotence
assert:
that:
- config|success
- not config|changed
- config.value == "true"
- config.name == "vmware.recycle.hung.wokervm"
- name: test reset configuration bool
cs_configuration:
name: vmware.recycle.hung.wokervm
value: false
register: config
- name: verify test reset configuration bool
assert:
that:
- config|success
- config|changed
- config.value == "false"
- config.name == "vmware.recycle.hung.wokervm"
- name: test configuration
cs_configuration:
name: agent.load.threshold
value: 0.7
register: config
- name: verify test configuration
assert:
that:
- config|success
- name: test update configuration float
cs_configuration:
name: agent.load.threshold
value: 0.81
register: config
- name: verify update configuration float
assert:
that:
- config|success
- config|changed
- config.value == "0.81"
- config.name == "agent.load.threshold"
- name: test update configuration float idempotence
cs_configuration:
name: agent.load.threshold
value: 0.81
register: config
- name: verify update configuration float idempotence
assert:
that:
- config|success
- not config|changed
- config.value == "0.81"
- config.name == "agent.load.threshold"
- name: reset configuration float
cs_configuration:
name: agent.load.threshold
value: 0.7
register: config
- name: verify reset configuration float
assert:
that:
- config|success
- config|changed
- config.value == "0.7"
- config.name == "agent.load.threshold"
- include: storage.yml
- include: account.yml
- include: zone.yml
- include: cluster.yml

@ -0,0 +1,59 @@
---
- name: test configuration storage
cs_configuration:
name: storage.overprovisioning.factor
storage: "{{ test_cs_configuration_storage }}"
value: 2.0
register: config
- name: verify test configuration storage
assert:
that:
- config|success
- name: test update configuration storage
cs_configuration:
name: storage.overprovisioning.factor
storage: "{{ test_cs_configuration_storage }}"
value: 3.0
register: config
- name: verify update configuration storage
assert:
that:
- config|success
- config|changed
- config.value == "3.0"
- config.name == "storage.overprovisioning.factor"
- config.scope == "storagepool"
- config.storage == "{{ test_cs_configuration_storage }}"
- name: test update configuration storage idempotence
cs_configuration:
name: storage.overprovisioning.factor
storage: "{{ test_cs_configuration_storage }}"
value: 3.0
register: config
- name: verify update configuration storage idempotence
assert:
that:
- config|success
- not config|changed
- config.value == "3.0"
- config.name == "storage.overprovisioning.factor"
- config.scope == "storagepool"
- config.storage == "{{ test_cs_configuration_storage }}"
- name: test reset configuration storage
cs_configuration:
name: storage.overprovisioning.factor
storage: "{{ test_cs_configuration_storage }}"
value: 2.0
register: config
- name: verify reset configuration storage
assert:
that:
- config|success
- config|changed
- config.value == "2.0"
- config.name == "storage.overprovisioning.factor"
- config.scope == "storagepool"
- config.storage == "{{ test_cs_configuration_storage }}"

@ -0,0 +1,59 @@
---
- name: test configuration zone
cs_configuration:
name: use.external.dns
zone: "{{ test_cs_configuration_zone }}"
value: false
register: config
- name: verify test configuration zone
assert:
that:
- config|success
- name: test update configuration zone
cs_configuration:
name: use.external.dns
zone: "{{ test_cs_configuration_zone }}"
value: true
register: config
- name: verify update configuration zone
assert:
that:
- config|success
- config|changed
- config.value == "true"
- config.name == "use.external.dns"
- config.scope == "zone"
- config.zone == "{{ test_cs_configuration_zone }}"
- name: test update configuration zone idempotence
cs_configuration:
name: use.external.dns
zone: "{{ test_cs_configuration_zone }}"
value: true
register: config
- name: verify update configuration zone idempotence
assert:
that:
- config|success
- not config|changed
- config.value == "true"
- config.name == "use.external.dns"
- config.scope == "zone"
- config.zone == "{{ test_cs_configuration_zone }}"
- name: test reset configuration zone
cs_configuration:
name: use.external.dns
zone: "{{ test_cs_configuration_zone }}"
value: false
register: config
- name: verify reset configuration zone
assert:
that:
- config|success
- config|changed
- config.value == "false"
- config.name == "use.external.dns"
- config.scope == "zone"
- config.zone == "{{ test_cs_configuration_zone }}"

@ -0,0 +1,3 @@
---
test_cs_instance_template: CentOS 5.3(64-bit) no GUI (Simulator)
test_cs_instance_offering_1: Small Instance

@ -0,0 +1,3 @@
---
dependencies:
- test_cs_common

@ -0,0 +1,55 @@
---
- name: setup ssh key
cs_sshkeypair: name={{ cs_resource_prefix }}-sshkey
register: sshkey
- name: verify setup ssh key
assert:
that:
- sshkey|success
- name: setup affinity group
cs_affinitygroup: name={{ cs_resource_prefix }}-ag
register: ag
- name: verify setup affinity group
assert:
that:
- ag|success
- name: setup security group
cs_securitygroup: name={{ cs_resource_prefix }}-sg
register: sg
- name: verify setup security group
assert:
that:
- sg|success
- name: setup instance
cs_instance:
name: "{{ cs_resource_prefix }}-vm"
template: "{{ test_cs_instance_template }}"
service_offering: "{{ test_cs_instance_offering_1 }}"
affinity_group: "{{ cs_resource_prefix }}-ag"
security_group: "{{ cs_resource_prefix }}-sg"
ssh_key: "{{ cs_resource_prefix }}-sshkey"
tags: []
register: instance
- name: verify create instance
assert:
that:
- instance|success
- name: test instance facts
cs_instance_facts:
name: "{{ cs_resource_prefix }}-vm"
register: instance_facts
- name: verify test instance facts
assert:
that:
- instance_facts|success
- not instance_facts|changed
- cloudstack_instance.id == instance.id
- cloudstack_instance.domain == instance.domain
- cloudstack_instance.account == instance.account
- cloudstack_instance.zone == instance.zone
- cloudstack_instance.name == instance.name
- cloudstack_instance.service_offering == instance.service_offering

@ -0,0 +1,3 @@
---
dependencies:
- test_cs_common

@ -0,0 +1,210 @@
---
- name: setup pod is absent
cs_pod:
name: "{{ cs_resource_prefix }}-pod"
state: absent
register: pod
- name: verify setup pod is absent
assert:
that:
- pod|success
- name: setup zone is present
cs_zone:
name: "{{ cs_resource_prefix }}-zone"
dns1: 8.8.8.8
dns2: 8.8.4.4
network_type: basic
register: zone
- name: verify setup zone is present
assert:
that:
- zone|success
- name: test fail if missing name
cs_pod:
register: pod
ignore_errors: true
- name: verify results of fail if missing name
assert:
that:
- pod|failed
- "pod.msg == 'missing required arguments: name'"
- name: test create pod
cs_pod:
name: "{{ cs_resource_prefix }}-pod"
zone: "{{ cs_resource_prefix }}-zone"
start_ip: 10.100.10.101
gateway: 10.100.10.1
netmask: 255.255.255.0
register: pod_origin
- name: verify test create pod
assert:
that:
- pod_origin|changed
- pod_origin.allocation_state == "Enabled"
- pod_origin.start_ip == "10.100.10.101"
- pod_origin.end_ip == "10.100.10.254"
- pod_origin.gateway == "10.100.10.1"
- pod_origin.netmask == "255.255.255.0"
- pod_origin.zone == "{{ cs_resource_prefix }}-zone"
- name: test create pod idempotence
cs_pod:
name: "{{ cs_resource_prefix }}-pod"
zone: "{{ cs_resource_prefix }}-zone"
start_ip: 10.100.10.101
gateway: 10.100.10.1
netmask: 255.255.255.0
register: pod
- name: verify test create pod idempotence
assert:
that:
- not pod|changed
- pod.allocation_state == "Enabled"
- pod.start_ip == "10.100.10.101"
- pod.end_ip == "10.100.10.254"
- pod.gateway == "10.100.10.1"
- pod.netmask == "255.255.255.0"
- pod.zone == "{{ cs_resource_prefix }}-zone"
- name: test update pod
cs_pod:
name: "{{ cs_resource_prefix }}-pod"
zone: "{{ cs_resource_prefix }}-zone"
start_ip: 10.100.10.102
gateway: 10.100.10.1
netmask: 255.255.255.0
register: pod
- name: verify test update pod
assert:
that:
- pod|changed
- pod.allocation_state == "Enabled"
- pod.start_ip == "10.100.10.102"
- pod.end_ip == "10.100.10.254"
- pod.gateway == "10.100.10.1"
- pod.netmask == "255.255.255.0"
- pod.zone == "{{ cs_resource_prefix }}-zone"
- name: test update pod idempotence
cs_pod:
name: "{{ cs_resource_prefix }}-pod"
zone: "{{ cs_resource_prefix }}-zone"
start_ip: 10.100.10.102
gateway: 10.100.10.1
netmask: 255.255.255.0
register: pod
- name: verify test update pod idempotence
assert:
that:
- not pod|changed
- pod.allocation_state == "Enabled"
- pod.start_ip == "10.100.10.102"
- pod.end_ip == "10.100.10.254"
- pod.gateway == "10.100.10.1"
- pod.netmask == "255.255.255.0"
- pod.zone == "{{ cs_resource_prefix }}-zone"
- name: test disable pod
cs_pod:
name: "{{ cs_resource_prefix }}-pod"
zone: "{{ cs_resource_prefix }}-zone"
state: disabled
register: pod
- name: verify test enable pod
assert:
that:
- pod|changed
- pod.allocation_state == "Disabled"
- pod.id == pod_origin.id
- pod.start_ip == "10.100.10.102"
- pod.end_ip == "10.100.10.254"
- pod.gateway == "10.100.10.1"
- pod.netmask == "255.255.255.0"
- pod.zone == "{{ cs_resource_prefix }}-zone"
- name: test disable pod idempotence
cs_pod:
name: "{{ cs_resource_prefix }}-pod"
zone: "{{ cs_resource_prefix }}-zone"
state: disabled
register: pod
- name: verify test enable pod idempotence
assert:
that:
- not pod|changed
- pod.allocation_state == "Disabled"
- pod.id == pod_origin.id
- pod.start_ip == "10.100.10.102"
- pod.end_ip == "10.100.10.254"
- pod.gateway == "10.100.10.1"
- pod.netmask == "255.255.255.0"
- pod.zone == "{{ cs_resource_prefix }}-zone"
- name: test enable pod
cs_pod:
name: "{{ cs_resource_prefix }}-pod"
zone: "{{ cs_resource_prefix }}-zone"
state: enabled
register: pod
- name: verify test disable pod
assert:
that:
- pod|changed
- pod.allocation_state == "Enabled"
- pod.id == pod_origin.id
- pod.start_ip == "10.100.10.102"
- pod.end_ip == "10.100.10.254"
- pod.gateway == "10.100.10.1"
- pod.netmask == "255.255.255.0"
- pod.zone == "{{ cs_resource_prefix }}-zone"
- name: test enable pod idempotence
cs_pod:
name: "{{ cs_resource_prefix }}-pod"
zone: "{{ cs_resource_prefix }}-zone"
state: enabled
register: pod
- name: verify test enabled pod idempotence
assert:
that:
- not pod|changed
- pod.allocation_state == "Enabled"
- pod.id == pod_origin.id
- pod.start_ip == "10.100.10.102"
- pod.end_ip == "10.100.10.254"
- pod.gateway == "10.100.10.1"
- pod.netmask == "255.255.255.0"
- pod.zone == "{{ cs_resource_prefix }}-zone"
- name: test absent pod
cs_pod:
name: "{{ cs_resource_prefix }}-pod"
zone: "{{ cs_resource_prefix }}-zone"
state: absent
register: pod
- name: verify test create pod
assert:
that:
- pod|changed
- pod.id == pod_origin.id
- pod.allocation_state == "Enabled"
- pod.start_ip == "10.100.10.102"
- pod.end_ip == "10.100.10.254"
- pod.gateway == "10.100.10.1"
- pod.netmask == "255.255.255.0"
- pod.zone == "{{ cs_resource_prefix }}-zone"
- name: test absent pod idempotence
cs_pod:
name: "{{ cs_resource_prefix }}-pod"
zone: "{{ cs_resource_prefix }}-zone"
state: absent
register: pod
- name: verify test absent pod idempotence
assert:
that:
- not pod|changed

@ -0,0 +1,3 @@
---
dependencies:
- test_cs_common

@ -0,0 +1,76 @@
---
- name: setup cpu limits account
cs_resourcelimit:
type: cpu
limit: 20
account: "{{ cs_resource_prefix }}_user"
domain: "{{ cs_resource_prefix }}-domain"
register: rl
- name: verify setup cpu limits account
assert:
that:
- rl|success
- rl.domain == "{{ cs_resource_prefix }}-domain"
- rl.account == "{{ cs_resource_prefix }}_user"
- rl.limit == 20
- rl.resource_type == "cpu"
- name: set cpu limits for domain
cs_resourcelimit:
type: cpu
limit: 12
domain: "{{ cs_resource_prefix }}-domain"
register: rl
- name: verify set cpu limits for domain
assert:
that:
- rl|changed
- rl.domain == "{{ cs_resource_prefix }}-domain"
- rl.limit == 12
- rl.resource_type == "cpu"
- name: set cpu limits for domain idempotence
cs_resourcelimit:
type: cpu
limit: 12
domain: "{{ cs_resource_prefix }}-domain"
register: rl
- name: verify set cpu limits for domain
assert:
that:
- not rl|changed
- rl.domain == "{{ cs_resource_prefix }}-domain"
- rl.limit == 12
- rl.resource_type == "cpu"
- name: set cpu limits for account
cs_resourcelimit:
type: cpu
limit: 10
account: "{{ cs_resource_prefix }}_user"
domain: "{{ cs_resource_prefix }}-domain"
register: rl
- name: verify set cpu limits for account
assert:
that:
- rl|changed
- rl.domain == "{{ cs_resource_prefix }}-domain"
- rl.account == "{{ cs_resource_prefix }}_user"
- rl.limit == 10
- rl.resource_type == "cpu"
- name: set cpu limits for account idempotence
cs_resourcelimit:
type: cpu
limit: 10
account: "{{ cs_resource_prefix }}_user"
domain: "{{ cs_resource_prefix }}-domain"
register: rl
- name: verify set cpu limits for account idempotence
assert:
that:
- not rl|changed
- rl.domain == "{{ cs_resource_prefix }}-domain"
- rl.account == "{{ cs_resource_prefix }}_user"
- rl.limit == 10
- rl.resource_type == "cpu"

@ -0,0 +1,76 @@
---
- name: setup instance limits account
cs_resourcelimit:
type: instance
limit: 20
account: "{{ cs_resource_prefix }}_user"
domain: "{{ cs_resource_prefix }}-domain"
register: rl
- name: verify setup instance limits account
assert:
that:
- rl|success
- rl.domain == "{{ cs_resource_prefix }}-domain"
- rl.account == "{{ cs_resource_prefix }}_user"
- rl.limit == 20
- rl.resource_type == "instance"
- name: set instance limits for domain
cs_resourcelimit:
type: instance
limit: 12
domain: "{{ cs_resource_prefix }}-domain"
register: rl
- name: verify set instance limits for domain
assert:
that:
- rl|changed
- rl.domain == "{{ cs_resource_prefix }}-domain"
- rl.limit == 12
- rl.resource_type == "instance"
- name: set instance limits for domain idempotence
cs_resourcelimit:
type: instance
limit: 12
domain: "{{ cs_resource_prefix }}-domain"
register: rl
- name: verify set instance limits for domain
assert:
that:
- not rl|changed
- rl.domain == "{{ cs_resource_prefix }}-domain"
- rl.limit == 12
- rl.resource_type == "instance"
- name: set instance limits for account
cs_resourcelimit:
type: instance
limit: 10
account: "{{ cs_resource_prefix }}_user"
domain: "{{ cs_resource_prefix }}-domain"
register: rl
- name: verify set instance limits for account
assert:
that:
- rl|changed
- rl.domain == "{{ cs_resource_prefix }}-domain"
- rl.account == "{{ cs_resource_prefix }}_user"
- rl.limit == 10
- rl.resource_type == "instance"
- name: set instance limits for account idempotence
cs_resourcelimit:
type: instance
limit: 10
account: "{{ cs_resource_prefix }}_user"
domain: "{{ cs_resource_prefix }}-domain"
register: rl
- name: verify set instance limits for account idempotence
assert:
that:
- not rl|changed
- rl.domain == "{{ cs_resource_prefix }}-domain"
- rl.account == "{{ cs_resource_prefix }}_user"
- rl.limit == 10
- rl.resource_type == "instance"

@ -0,0 +1,61 @@
---
- name: setup domain
cs_domain: path={{ cs_resource_prefix }}-domain
register: dom
- name: verify setup domain
assert:
that:
- dom|success
- name: setup account
cs_account:
name: "{{ cs_resource_prefix }}_user"
username: "{{ cs_resource_prefix }}_username"
password: "{{ cs_resource_prefix }}_password"
last_name: "{{ cs_resource_prefix }}_last_name"
first_name: "{{ cs_resource_prefix }}_first_name"
email: "{{ cs_resource_prefix }}@example.com"
network_domain: "{{ cs_resource_prefix }}-local"
domain: "{{ cs_resource_prefix }}-domain"
register: acc
- name: verify setup account
assert:
that:
- acc|success
- name: test failed unkonwn type
cs_resourcelimit:
type: unkonwn
limit: 20
domain: "{{ cs_resource_prefix }}-domain"
register: rl
ignore_errors: yes
- name: verify test failed unkonwn type
assert:
that:
- rl|failed
- name: test failed missing type
cs_resourcelimit:
register: rl
ignore_errors: yes
- name: verify test failed missing type
assert:
that:
- rl|failed
- name: setup resource limits domain
cs_resourcelimit:
type: instance
limit: 20
domain: "{{ cs_resource_prefix }}-domain"
register: rl
- name: verify setup resource limits domain
assert:
that:
- rl|success
- rl.domain == "{{ cs_resource_prefix }}-domain"
- rl.limit == 20
- include: instance.yml
- include: cpu.yml

@ -0,0 +1,6 @@
---
test_cs_instance_1: "{{ cs_resource_prefix }}-vm1"
test_cs_instance_2: "{{ cs_resource_prefix }}-vm2"
test_cs_instance_template: CentOS 5.3(64-bit) no GUI (Simulator)
test_cs_instance_offering_1: Small Instance
test_cs_disk_offering_1: Custom

@ -0,0 +1,3 @@
---
dependencies:
- test_cs_common

@ -0,0 +1,215 @@
---
- name: setup
cs_volume: name={{ cs_resource_prefix }}_vol state=absent
register: vol
- name: verify setup
assert:
that:
- vol|success
- name: setup instance 1
cs_instance:
name: "{{ test_cs_instance_1 }}"
template: "{{ test_cs_instance_template }}"
service_offering: "{{ test_cs_instance_offering_1 }}"
register: instance
- name: verify create instance
assert:
that:
- instance|success
- name: setup instance 2
cs_instance:
name: "{{ test_cs_instance_2 }}"
template: "{{ test_cs_instance_template }}"
service_offering: "{{ test_cs_instance_offering_1 }}"
register: instance
- name: verify create instance
assert:
that:
- instance|success
- name: test fail if missing name
action: cs_volume
register: vol
ignore_errors: true
- name: verify results of fail if missing name
assert:
that:
- vol|failed
- "vol.msg == 'missing required arguments: name'"
- name: test create volume
cs_volume:
name: "{{ cs_resource_prefix }}_vol"
disk_offering: "{{ test_cs_disk_offering_1 }}"
size: 20
register: vol
- name: verify results test create volume
assert:
that:
- vol|changed
- vol.size == 20 * 1024 ** 3
- vol.name == "{{ cs_resource_prefix }}_vol"
- name: test create volume idempotence
cs_volume:
name: "{{ cs_resource_prefix }}_vol"
disk_offering: "{{ test_cs_disk_offering_1 }}"
size: 20
register: vol
- name: verify results test create volume idempotence
assert:
that:
- not vol|changed
- vol.size == 20 * 1024 ** 3
- vol.name == "{{ cs_resource_prefix }}_vol"
- name: test shrink volume
cs_volume:
name: "{{ cs_resource_prefix }}_vol"
disk_offering: "{{ test_cs_disk_offering_1 }}"
size: 10
shrink_ok: yes
register: vol
- name: verify results test create volume
assert:
that:
- vol|changed
- vol.size == 10 * 1024 ** 3
- vol.name == "{{ cs_resource_prefix }}_vol"
- name: test shrink volume idempotence
cs_volume:
name: "{{ cs_resource_prefix }}_vol"
disk_offering: "{{ test_cs_disk_offering_1 }}"
size: 10
shrink_ok: yes
register: vol
- name: verify results test create volume
assert:
that:
- not vol|changed
- vol.size == 10 * 1024 ** 3
- vol.name == "{{ cs_resource_prefix }}_vol"
- name: test attach volume
cs_volume:
name: "{{ cs_resource_prefix }}_vol"
vm: "{{ test_cs_instance_1 }}"
state: attached
register: vol
- name: verify results test attach volume
assert:
that:
- vol|changed
- vol.name == "{{ cs_resource_prefix }}_vol"
- vol.vm == "{{ test_cs_instance_1 }}"
- vol.attached is defined
- name: test attach volume idempotence
cs_volume:
name: "{{ cs_resource_prefix }}_vol"
vm: "{{ test_cs_instance_1 }}"
state: attached
register: vol
- name: verify results test attach volume idempotence
assert:
that:
- not vol|changed
- vol.name == "{{ cs_resource_prefix }}_vol"
- vol.vm == "{{ test_cs_instance_1 }}"
- vol.attached is defined
- name: test attach attached volume to another vm
cs_volume:
name: "{{ cs_resource_prefix }}_vol"
vm: "{{ test_cs_instance_2 }}"
state: attached
register: vol
- name: verify results test attach attached volume to another vm
assert:
that:
- vol|changed
- vol.name == "{{ cs_resource_prefix }}_vol"
- vol.vm == "{{ test_cs_instance_2 }}"
- vol.attached is defined
- name: test attach attached volume to another vm idempotence
cs_volume:
name: "{{ cs_resource_prefix }}_vol"
vm: "{{ test_cs_instance_2 }}"
state: attached
register: vol
- name: verify results test attach attached volume to another vm idempotence
assert:
that:
- not vol|changed
- vol.name == "{{ cs_resource_prefix }}_vol"
- vol.vm == "{{ test_cs_instance_2 }}"
- vol.attached is defined
- name: test detach volume
cs_volume:
name: "{{ cs_resource_prefix }}_vol"
state: detached
register: vol
- name: verify results test detach volume
assert:
that:
- vol|changed
- vol.name == "{{ cs_resource_prefix }}_vol"
- vol.attached is undefined
- name: test detach volume idempotence
cs_volume:
name: "{{ cs_resource_prefix }}_vol"
state: detached
register: vol
- name: verify results test detach volume idempotence
assert:
that:
- not vol|changed
- vol.name == "{{ cs_resource_prefix }}_vol"
- vol.attached is undefined
- name: test delete volume
cs_volume:
name: "{{ cs_resource_prefix }}_vol"
state: absent
register: vol
- name: verify results test create volume
assert:
that:
- vol|changed
- vol.name == "{{ cs_resource_prefix }}_vol"
- name: test delete volume idempotence
cs_volume:
name: "{{ cs_resource_prefix }}_vol"
state: absent
register: vol
- name: verify results test delete volume idempotence
assert:
that:
- not vol|changed
- name: cleanup instance 1
cs_instance:
name: "{{ test_cs_instance_1 }}"
state: absent
register: instance
- name: verify create instance
assert:
that:
- instance|success
- name: cleanup instance 2
cs_instance:
name: "{{ test_cs_instance_2 }}"
state: absent
register: instance
- name: verify create instance
assert:
that:
- instance|success

@ -248,7 +248,7 @@
that:
- 'item.changed == true'
- 'item.state == "file"'
with_items: "{{file16_result.results}}"
- name: try to force the sub-directory to a link
file: src={{output_dir}}/testing dest={{output_dir}}/sub1 state=link force=yes

@ -68,3 +68,12 @@
- '"0.10 GB" == 102400000|human_readable(unit="G")' - '"0.10 GB" == 102400000|human_readable(unit="G")'
- '"0.10 Gb" == 102400000|human_readable(isbits=True, unit="G")' - '"0.10 Gb" == 102400000|human_readable(isbits=True, unit="G")'
- name: Container lookups with extract
assert:
that:
- "'x' == [0]|map('extract',['x','y'])|list|first"
- "'y' == [1]|map('extract',['x','y'])|list|first"
- "42 == ['x']|map('extract',{'x':42,'y':31})|list|first"
- "31 == ['x','y']|map('extract',{'x':42,'y':31})|list|last"
- "'local' == ['localhost']|map('extract',hostvars,'ansible_connection')|list|first"
- "'local' == ['localhost']|map('extract',hostvars,['ansible_connection'])|list|first"

@ -146,6 +146,11 @@
when: "{{ not python_has_ssl_context }}" when: "{{ not python_has_ssl_context }}"
# End hacky SNI test section # End hacky SNI test section
- name: Test get_url with redirect
get_url:
url: 'http://httpbin.org/redirect/6'
dest: "{{ output_dir }}/redirect.json"
- name: Test that setting file modes work - name: Test that setting file modes work
get_url: get_url:
url: 'http://httpbin.org/' url: 'http://httpbin.org/'
@ -178,4 +183,4 @@
assert: assert:
that: that:
- "result.changed == true" - "result.changed == true"
- "stat_result.stat.mode == '0070'" - "stat_result.stat.mode == '0070'"

@ -88,7 +88,7 @@
- name: remove known_host files - name: remove known_host files
file: state=absent path={{ item }} file: state=absent path={{ item }}
with_items: known_host_files with_items: "{{known_host_files}}"
- name: checkout ssh://git@github.com repo without accept_hostkey (expected fail) - name: checkout ssh://git@github.com repo without accept_hostkey (expected fail)
git: repo={{ repo_format2 }} dest={{ checkout_dir }} git: repo={{ repo_format2 }} dest={{ checkout_dir }}

@ -194,3 +194,23 @@
- assert: - assert:
that: that:
- "'www.kennethreitz.org' in web_data" - "'www.kennethreitz.org' in web_data"
- name: Test cartesian lookup
debug: var={{item}}
with_cartesian:
- ["A", "B", "C"]
- ["1", "2", "3"]
register: product
- name: Verify cartesian lookup
assert:
that:
- product.results[0]['item'] == ["A", "1"]
- product.results[1]['item'] == ["A", "2"]
- product.results[2]['item'] == ["A", "3"]
- product.results[3]['item'] == ["B", "1"]
- product.results[4]['item'] == ["B", "2"]
- product.results[5]['item'] == ["B", "3"]
- product.results[6]['item'] == ["C", "1"]
- product.results[7]['item'] == ["C", "2"]
- product.results[8]['item'] == ["C", "3"]

@ -96,7 +96,7 @@
mysql_user: name={{ item[0] }} priv={{ item[1] }}.*:ALL append_privs=yes password={{ user_password_1 }} mysql_user: name={{ item[0] }} priv={{ item[1] }}.*:ALL append_privs=yes password={{ user_password_1 }}
with_nested: with_nested:
- [ '{{ user_name_1 }}' , '{{ user_name_2 }}'] - [ '{{ user_name_1 }}' , '{{ user_name_2 }}']
- db_names - "{{db_names}}"
- name: show grants access for user1 on multiple database - name: show grants access for user1 on multiple database
command: mysql "-e SHOW GRANTS FOR '{{ user_name_1 }}'@'localhost';" command: mysql "-e SHOW GRANTS FOR '{{ user_name_1 }}'@'localhost';"
@ -104,7 +104,7 @@
- name: assert grant access for user1 on multiple database - name: assert grant access for user1 on multiple database
assert: { that: "'{{ item }}' in result.stdout" } assert: { that: "'{{ item }}' in result.stdout" }
with_items: db_names with_items: "{{db_names}}"
- name: show grants access for user2 on multiple database - name: show grants access for user2 on multiple database
command: mysql "-e SHOW GRANTS FOR '{{ user_name_2 }}'@'localhost';" command: mysql "-e SHOW GRANTS FOR '{{ user_name_2 }}'@'localhost';"

@ -195,7 +195,7 @@
# Verify mysql_variable fails with an incorrect login_host parameter # Verify mysql_variable fails with an incorrect login_host parameter
# #
- name: query mysql_variable using incorrect login_host - name: query mysql_variable using incorrect login_host
mysql_variables: variable=wait_timeout login_host=12.0.0.9 mysql_variables: variable=wait_timeout login_host=12.0.0.9 connect_timeout=5
register: result register: result
ignore_errors: true ignore_errors: true

@ -69,8 +69,8 @@
- '"json" in item.1' - '"json" in item.1'
- item.0.stat.checksum == item.1.content | checksum - item.0.stat.checksum == item.1.content | checksum
with_together: with_together:
- pass_checksum.results - "{{pass_checksum.results}}"
- pass.results - "{{pass.results}}"
- name: checksum fail_json - name: checksum fail_json
@ -89,8 +89,8 @@
- item.0.stat.checksum == item.1.content | checksum - item.0.stat.checksum == item.1.content | checksum
- '"json" not in item.1' - '"json" not in item.1'
with_together: with_together:
- fail_checksum.results - "{{fail_checksum.results}}"
- fail.results - "{{fail.results}}"
- name: test https fetch to a site with mismatched hostname and certificate - name: test https fetch to a site with mismatched hostname and certificate
uri: uri:
@ -153,6 +153,28 @@
that: that:
- 'result.location|default("") == "http://httpbin.org/relative-redirect/1"' - 'result.location|default("") == "http://httpbin.org/relative-redirect/1"'
- name: Check SSL with redirect
uri:
url: 'https://httpbin.org/redirect/2'
register: result
- name: Assert SSL with redirect
assert:
that:
- 'result.url|default("") == "https://httpbin.org/get"'
- name: redirect to bad SSL site
uri:
url: 'http://wrong.host.badssl.com'
register: result
ignore_errors: true
- name: Ensure bad SSL site redirect fails
assert:
that:
- result|failed
- '"wrong.host.badssl.com" in result.msg'
- name: test basic auth - name: test basic auth
uri: uri:
url: 'http://httpbin.org/basic-auth/user/passwd' url: 'http://httpbin.org/basic-auth/user/passwd'

@ -183,7 +183,7 @@
# assert: # assert:
# that: # that:
# - "{{item.stat.mode}} == 0700" # - "{{item.stat.mode}} == 0700"
# with_items: dir_stats.results # with_items: "{{dir_stats.results}}"
# errors on this aren't presently ignored so this test is commented out. But it would be nice to fix. # errors on this aren't presently ignored so this test is commented out. But it would be nice to fix.

@ -183,7 +183,7 @@
that: that:
- 'item.changed == true' - 'item.changed == true'
# - 'item.state == "file"' # - 'item.state == "file"'
with_items: file16_result.results with_items: "{{file16_result.results}}"
#- name: try to force the sub-directory to a link #- name: try to force the sub-directory to a link
# win_file: src={{win_output_dir}}/testing dest={{win_output_dir}}/sub1 state=link force=yes # win_file: src={{win_output_dir}}/testing dest={{win_output_dir}}/sub1 state=link force=yes

@ -0,0 +1,3 @@
dependencies:
- prepare_win_tests

@ -0,0 +1,133 @@
# test code for the win_regmerge module
# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com>
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# clear the area of the registry we are using for tests
- name: remove setting
win_regedit:
key: 'HKLM:\SOFTWARE\Wow6432Node\Cow Corp'
state: absent
# copy over some registry files to work with
- name: copy over some registry files to work with
win_copy: src={{item}} dest={{win_output_dir}}\\{{item}}
with_items:
- settings1.reg
- settings2.reg
- settings3.reg
# test 1 - basic test of changed behaviour
# merge in REG_SZ
- name: test 1 merge in a setting
win_regmerge:
path: "{{win_output_dir}}\\settings1.reg"
register: merge11_result
- assert:
that:
- "merge11_result.changed == true"
# re run the merge
- name: test 1 merge in the setting again
win_regmerge:
path: "{{win_output_dir}}\\settings1.reg"
register: merge12_result
# without a compare_to key, should always report changed
- assert:
that:
- "merge12_result.changed == true"
# assert changed false
# prune reg key
- name: test 1 remove setting
win_regedit:
key: 'HKLM:\SOFTWARE\Wow6432Node\Cow Corp'
state: absent
#
# test 2, observe behaviour when compare_to param is set
#
- name: test 2 merge in a setting
win_regmerge:
path: "{{win_output_dir}}\\settings1.reg"
compare_to: 'HKLM:\SOFTWARE\Wow6432Node\Cow Corp\Moosic\ILikeToMooveIt'
register: merge21_result
- assert:
that:
- "merge21_result.changed == true"
# re run the merge
- name: test 2 merge in the setting again but with compare_key
win_regmerge:
path: "{{win_output_dir}}\\settings1.reg"
compare_to: 'HKLM:\SOFTWARE\Wow6432Node\Cow Corp\Moosic\ILikeToMooveIt'
register: merge22_result
# with a compare to key, should now report not changed
- assert:
that:
- "merge22_result.changed == false"
# assert changed false
# prune the contents of the registry from the parent of the compare key downwards
- name: test 2 clean up remove setting
win_regedit:
key: 'HKLM:\SOFTWARE\Wow6432Node\Cow Corp'
state: absent
# test 3 merge in more complex settings
- name: test 3 merge in a setting
win_regmerge:
path: "{{win_output_dir}}\\settings3.reg"
compare_to: 'HKLM:\SOFTWARE\Wow6432Node\Cow Corp\Moo Monitor'
register: merge31_result
- assert:
that:
- "merge31_result.changed == true"
# re run the merge
- name: test 3 merge in the setting again but with compare_key check
win_regmerge:
path: "{{win_output_dir}}\\settings3.reg"
compare_to: 'HKLM:\SOFTWARE\Wow6432Node\Cow Corp\Moo Monitor'
register: merge32_result
# with a compare to key, should now report not changed
- assert:
that:
- "merge32_result.changed == false"
# assert changed false
# prune the contents of the registry from the compare key downwards
- name: test 3 clean up remove setting
win_regedit:
key: 'HKLM:\SOFTWARE\Wow6432Node\Cow Corp'
state: absent
# clean up registry files
- name: clean up registry files
win_file: path={{win_output_dir}}\\{{item}} state=absent
with_items:
- settings1.reg
- settings2.reg
- settings3.reg
# END OF win_regmerge tests

@ -0,0 +1,4 @@
#jinja2: newline_sequence:'\r\n'
{{ templated_var }}
{{ templated_var }}
{{ templated_var }}

@ -0,0 +1 @@
templated_var: templated_var_loaded

@ -33,17 +33,17 @@
- name: set block always run flag - name: set block always run flag
set_fact: set_fact:
block_always_run: true block_always_run: true
- block: #- block:
- meta: noop # - meta: noop
always: # always:
- name: set nested block always run flag # - name: set nested block always run flag
set_fact: # set_fact:
nested_block_always_run: true # nested_block_always_run: true
- name: fail in always # - name: fail in always
fail: # fail:
- name: tasks flag should not be set after failure in always # - name: tasks flag should not be set after failure in always
set_fact: # set_fact:
always_run_after_failure: true # always_run_after_failure: true
- meta: clear_host_errors - meta: clear_host_errors
post_tasks: post_tasks:
@ -52,7 +52,7 @@
- block_tasks_run - block_tasks_run
- block_rescue_run - block_rescue_run
- block_always_run - block_always_run
- nested_block_always_run #- nested_block_always_run
- not tasks_run_after_failure - not tasks_run_after_failure
- not rescue_run_after_failure - not rescue_run_after_failure
- not always_run_after_failure - not always_run_after_failure
@ -84,7 +84,7 @@
include: fail.yml include: fail.yml
args: args:
msg: "failed from rescue" msg: "failed from rescue"
- name: tasks flag should not be set after failure in rescue - name: flag should not be set after failure in rescue
set_fact: set_fact:
rescue_run_after_failure: true rescue_run_after_failure: true
always: always:

@ -0,0 +1,56 @@
[local]
local-pipelining ansible_ssh_pipelining=true
local-no-pipelining ansible_ssh_pipelining=false
[local:vars]
ansible_host=localhost
ansible_connection=local
[chroot]
chroot-pipelining ansible_ssh_pipelining=true
chroot-no-pipelining ansible_ssh_pipelining=false
[chroot:vars]
ansible_host=/
ansible_connection=chroot
[docker]
docker-pipelining ansible_ssh_pipelining=true
docker-no-pipelining ansible_ssh_pipelining=false
[docker:vars]
ansible_host=ubuntu-latest
ansible_connection=docker
[libvirt_lxc]
libvirt_lxc-pipelining ansible_ssh_pipelining=true
libvirt_lxc-no-pipelining ansible_ssh_pipelining=false
[libvirt_lxc:vars]
ansible_host=lv-ubuntu-wily-amd64
ansible_connection=libvirt_lxc
[jail]
jail-pipelining ansible_ssh_pipelining=true
jail-no-pipelining ansible_ssh_pipelining=false
[jail:vars]
ansible_host=freebsd_10_2
ansible_connection=jail
ansible_python_interpreter=/usr/local/bin/python
[ssh]
ssh-pipelining ansible_ssh_pipelining=true
ssh-no-pipelining ansible_ssh_pipelining=false
[ssh:vars]
ansible_host=localhost
ansible_connection=ssh
[paramiko_ssh]
paramiko_ssh-pipelining ansible_ssh_pipelining=true
paramiko_ssh-no-pipelining ansible_ssh_pipelining=false
[paramiko_ssh:vars]
ansible_host=localhost
ansible_connection=paramiko_ssh
[skip-during-build:children]
docker
libvirt_lxc
jail
ssh
paramiko_ssh
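The inventory above pairs one pipelining-enabled and one pipelining-disabled host for every connection plugin under test. As a rough sketch (not part of this change set; it only assumes the group and variable names defined above), a play exercising each variant could look like:

- hosts: all:!skip-during-build
  gather_facts: no
  serial: 1
  tasks:
    # module execution confirms the connection plugin can transfer and run code
    - name: smoke-test module execution over this connection
      ping:
    # raw execution confirms plain command execution and echoes the pipelining setting
    - name: smoke-test raw execution over this connection
      raw: echo connection={{ ansible_connection }} pipelining={{ ansible_ssh_pipelining }}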

@ -0,0 +1,37 @@
- hosts: all
gather_facts: no
serial: 1
tasks:
### raw with unicode arg and output
- name: raw with unicode arg and output
raw: echo 汉语
register: command
- name: check output of raw with unicode arg and output
assert: { that: "'汉语' in command.stdout" }
### copy local file with unicode filename and content
- name: create local file with unicode filename and content
local_action: lineinfile dest=/tmp/ansible-local-汉语/汉语.txt create=true line=汉语
- name: remove remote file with unicode filename and content
file: path=/tmp/ansible-remote-汉语/汉语.txt state=absent
- name: create remote directory with unicode name
file: path=/tmp/ansible-remote-汉语 state=directory
- name: copy local file with unicode filename and content
copy: src=/tmp/ansible-local-汉语/汉语.txt dest=/tmp/ansible-remote-汉语/汉语.txt
### fetch remote file with unicode filename and content
- name: remove local file with unicode filename and content
local_action: file path=/tmp/ansible-local-汉语/汉语.txt state=absent
- name: fetch remote file with unicode filename and content
fetch: src=/tmp/ansible-remote-汉语/汉语.txt dest=/tmp/ansible-local-汉语/汉语.txt fail_on_missing=true validate_checksum=true flat=true
### remove local and remote temp files
- name: remove local temp file
local_action: file path=/tmp/ansible-local-汉语 state=absent
- name: remove remote temp file
file: path=/tmp/ansible-remote-汉语 state=absent

@ -0,0 +1,133 @@
---
- hosts: facthost0
tags: [ 'fact_min' ]
connection: local
gather_subset: "all"
gather_facts: yes
tasks:
- setup:
register: facts
- debug: var=facts
- name: Test that retrieving all facts works
assert:
that:
- '"{{ ansible_user_id|default("UNDEF_MIN") }}" != "UNDEF_MIN"'
- '"{{ ansible_interfaces|default("UNDEF_NET") }}" != "UNDEF_NET"'
- '"{{ ansible_mounts|default("UNDEF_HW") }}" != "UNDEF_HW"'
- '"{{ ansible_virtualization_role|default("UNDEF_VIRT") }}" != "UNDEF_VIRT"'
- hosts: facthost1
tags: [ 'fact_min' ]
connection: local
gather_subset: "!all"
gather_facts: yes
tasks:
- name: Test that retrieving only minimal facts works
assert:
that:
- '"{{ ansible_user_id|default("UNDEF_MIN") }}" != "UNDEF_MIN"'
- '"{{ ansible_interfaces|default("UNDEF_NET") }}" == "UNDEF_NET"'
- '"{{ ansible_mounts|default("UNDEF_HW") }}" == "UNDEF_HW"'
- '"{{ ansible_virtualization_role|default("UNDEF_VIRT") }}" == "UNDEF_VIRT"'
- hosts: facthost2
tags: [ 'fact_network' ]
connection: local
gather_subset: "network"
gather_facts: yes
tasks:
- name: Test that retrieving network facts works
assert:
that:
- '"{{ ansible_user_id|default("UNDEF_MIN") }}" != "UNDEF_MIN"'
- '"{{ ansible_interfaces|default("UNDEF_NET") }}" != "UNDEF_NET"'
- '"{{ ansible_mounts|default("UNDEF_HW") }}" == "UNDEF_HW"'
- '"{{ ansible_virtualization_role|default("UNDEF_VIRT") }}" == "UNDEF_VIRT"'
- hosts: facthost3
tags: [ 'fact_hardware' ]
connection: local
gather_subset: "hardware"
gather_facts: yes
tasks:
- name: Test that retrieving hardware facts works
assert:
that:
- '"{{ ansible_user_id|default("UNDEF_MIN") }}" != "UNDEF_MIN"'
- '"{{ ansible_interfaces|default("UNDEF_NET") }}" == "UNDEF_NET"'
- '"{{ ansible_mounts|default("UNDEF_HW") }}" != "UNDEF_HW"'
- '"{{ ansible_virtualization_role|default("UNDEF_VIRT") }}" == "UNDEF_VIRT"'
- hosts: facthost4
tags: [ 'fact_virtual' ]
connection: local
gather_subset: "virtual"
gather_facts: yes
tasks:
- name: Test that retrieving virtualization facts works
assert:
that:
- '"{{ ansible_user_id|default("UNDEF_MIN") }}" != "UNDEF_MIN"'
- '"{{ ansible_interfaces|default("UNDEF_NET") }}" == "UNDEF_NET"'
- '"{{ ansible_mounts|default("UNDEF_HW") }}" == "UNDEF_HW"'
- '"{{ ansible_virtualization_role|default("UNDEF_VIRT") }}" != "UNDEF_VIRT"'
- hosts: facthost5
tags: [ 'fact_comma_string' ]
connection: local
gather_subset: "virtual,network"
gather_facts: yes
tasks:
- name: Test that retrieving virtualization and network as a string works
assert:
that:
- '"{{ ansible_user_id|default("UNDEF_MIN") }}" != "UNDEF_MIN"'
- '"{{ ansible_interfaces|default("UNDEF_NET") }}" != "UNDEF_NET"'
- '"{{ ansible_mounts|default("UNDEF_HW") }}" == "UNDEF_HW"'
- '"{{ ansible_virtualization_role|default("UNDEF_VIRT") }}" != "UNDEF_VIRT"'
- hosts: facthost6
tags: [ 'fact_yaml_list' ]
connection: local
gather_subset:
- virtual
- network
gather_facts: yes
tasks:
- name: Test that retrieving virtualization and network as a YAML list works
assert:
that:
- '"{{ ansible_user_id|default("UNDEF_MIN") }}" != "UNDEF_MIN"'
- '"{{ ansible_interfaces|default("UNDEF_NET") }}" != "UNDEF_NET"'
- '"{{ ansible_mounts|default("UNDEF_HW") }}" == "UNDEF_HW"'
- '"{{ ansible_virtualization_role|default("UNDEF_VIRT") }}" != "UNDEF_VIRT"'
- hosts: facthost7
tags: [ 'fact_negation' ]
connection: local
gather_subset: "!hardware"
gather_facts: yes
tasks:
- name: Test that negation of fact subsets works
assert:
that:
- '"{{ ansible_user_id|default("UNDEF_MIN") }}" != "UNDEF_MIN"'
- '"{{ ansible_interfaces|default("UNDEF_NET") }}" != "UNDEF_NET"'
- '"{{ ansible_mounts|default("UNDEF_HW") }}" == "UNDEF_HW"'
- '"{{ ansible_virtualization_role|default("UNDEF_VIRT") }}" != "UNDEF_VIRT"'
- hosts: facthost8
tags: [ 'fact_mixed_negation_addition' ]
connection: local
gather_subset: "!hardware,network"
gather_facts: yes
tasks:
- name: Test that negation and additional subsets work together
assert:
that:
- '"{{ ansible_user_id|default("UNDEF_MIN") }}" != "UNDEF_MIN"'
- '"{{ ansible_interfaces|default("UNDEF_NET") }}" != "UNDEF_NET"'
- '"{{ ansible_mounts|default("UNDEF_HW") }}" == "UNDEF_HW"'
- '"{{ ansible_virtualization_role|default("UNDEF_VIRT") }}" == "UNDEF_VIRT"'

@ -0,0 +1,26 @@
- hosts: testhost
gather_facts: no
tags:
- always
tasks:
- name: ensure fail action produces a failing result
fail:
ignore_errors: yes
register: fail_out
- debug:
msg: fail works ({{ fail_out.failed }})
- name: ensure assert produces a failing result
assert:
that: false
ignore_errors: yes
register: assert_out
- debug:
msg: assert works ({{ assert_out.failed }})
- name: ensure fail action stops execution
fail:
msg: fail actually failed (this is expected)

@ -37,4 +37,5 @@
- { role: test_win_copy, tags: test_win_copy } - { role: test_win_copy, tags: test_win_copy }
- { role: test_win_template, tags: test_win_template } - { role: test_win_template, tags: test_win_template }
- { role: test_win_lineinfile, tags: test_win_lineinfile } - { role: test_win_lineinfile, tags: test_win_lineinfile }
- { role: test_win_regmerge, tags: test_win_regmerge }

@ -23,8 +23,9 @@ from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock from ansible.compat.tests.mock import patch, MagicMock
from ansible.errors import AnsibleError, AnsibleParserError from ansible.errors import AnsibleError, AnsibleParserError
from ansible.executor.play_iterator import PlayIterator from ansible.executor.play_iterator import HostState, PlayIterator
from ansible.playbook import Playbook from ansible.playbook import Playbook
from ansible.playbook.task import Task
from ansible.playbook.play_context import PlayContext from ansible.playbook.play_context import PlayContext
from units.mock.loader import DictDataLoader from units.mock.loader import DictDataLoader
@ -37,6 +38,23 @@ class TestPlayIterator(unittest.TestCase):
def tearDown(self): def tearDown(self):
pass pass
def test_host_state(self):
hs = HostState(blocks=[x for x in range(0, 10)])
hs.tasks_child_state = HostState(blocks=[0])
hs.rescue_child_state = HostState(blocks=[1])
hs.always_child_state = HostState(blocks=[2])
hs.__repr__()
hs.run_state = 100
hs.__repr__()
hs.fail_state = 15
hs.__repr__()
for i in range(0, 10):
hs.cur_block = i
self.assertEqual(hs.get_current_block(), i)
new_hs = hs.copy()
def test_play_iterator(self): def test_play_iterator(self):
fake_loader = DictDataLoader({ fake_loader = DictDataLoader({
"test_play.yml": """ "test_play.yml": """
@ -48,6 +66,18 @@ class TestPlayIterator(unittest.TestCase):
- debug: msg="this is a pre_task" - debug: msg="this is a pre_task"
tasks: tasks:
- debug: msg="this is a regular task" - debug: msg="this is a regular task"
- block:
- debug: msg="this is a block task"
- block:
- debug: msg="this is a sub-block in a block"
rescue:
- debug: msg="this is a rescue task"
- block:
- debug: msg="this is a sub-block in a rescue"
always:
- debug: msg="this is an always task"
- block:
- debug: msg="this is a sub-block in an always"
post_tasks: post_tasks:
- debug: msg="this is a post_task" - debug: msg="this is a post_task"
""", """,
@ -64,10 +94,12 @@ class TestPlayIterator(unittest.TestCase):
hosts = [] hosts = []
for i in range(0, 10): for i in range(0, 10):
host = MagicMock() host = MagicMock()
host.get_name.return_value = 'host%02d' % i host.name = host.get_name.return_value = 'host%02d' % i
hosts.append(host) hosts.append(host)
mock_var_manager._fact_cache['host00'] = dict()
inventory = MagicMock() inventory = MagicMock()
inventory.get_hosts.return_value = hosts inventory.get_hosts.return_value = hosts
inventory.filter_hosts.return_value = hosts inventory.filter_hosts.return_value = hosts
@ -82,6 +114,16 @@ class TestPlayIterator(unittest.TestCase):
all_vars=dict(), all_vars=dict(),
) )
# look up an original task
target_task = p._entries[0].tasks[0].block[0]
task_copy = target_task.copy(exclude_block=True)
found_task = itr.get_original_task(hosts[0], task_copy)
self.assertEqual(target_task, found_task)
bad_task = Task()
found_task = itr.get_original_task(hosts[0], bad_task)
self.assertIsNone(found_task)
# pre task # pre task
(host_state, task) = itr.get_next_task_for_host(hosts[0]) (host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task) self.assertIsNotNone(task)
@ -100,6 +142,38 @@ class TestPlayIterator(unittest.TestCase):
self.assertIsNotNone(task) self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug') self.assertEqual(task.action, 'debug')
self.assertIsNone(task._role) self.assertIsNone(task._role)
# block task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg="this is a block task"))
# sub-block task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg="this is a sub-block in a block"))
# mark the host failed
itr.mark_host_failed(hosts[0])
# block rescue task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg="this is a rescue task"))
# sub-block rescue task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg="this is a sub-block in a rescue"))
# block always task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg="this is an always task"))
# sub-block always task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg="this is a sub-block in an always"))
# implicit meta: flush_handlers # implicit meta: flush_handlers
(host_state, task) = itr.get_next_task_for_host(hosts[0]) (host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task) self.assertIsNotNone(task)
@ -116,3 +190,183 @@ class TestPlayIterator(unittest.TestCase):
(host_state, task) = itr.get_next_task_for_host(hosts[0]) (host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNone(task) self.assertIsNone(task)
# host 0 shouldn't be in the failed hosts, as the error
# was handled by a rescue block
failed_hosts = itr.get_failed_hosts()
self.assertNotIn(hosts[0], failed_hosts)
def test_play_iterator_nested_blocks(self):
fake_loader = DictDataLoader({
"test_play.yml": """
- hosts: all
gather_facts: false
tasks:
- block:
- block:
- block:
- block:
- block:
- debug: msg="this is the first task"
- ping:
rescue:
- block:
- block:
- block:
- block:
- debug: msg="this is the rescue task"
always:
- block:
- block:
- block:
- block:
- debug: msg="this is the always task"
""",
})
mock_var_manager = MagicMock()
mock_var_manager._fact_cache = dict()
mock_var_manager.get_vars.return_value = dict()
p = Playbook.load('test_play.yml', loader=fake_loader, variable_manager=mock_var_manager)
hosts = []
for i in range(0, 10):
host = MagicMock()
host.name = host.get_name.return_value = 'host%02d' % i
hosts.append(host)
inventory = MagicMock()
inventory.get_hosts.return_value = hosts
inventory.filter_hosts.return_value = hosts
play_context = PlayContext(play=p._entries[0])
itr = PlayIterator(
inventory=inventory,
play=p._entries[0],
play_context=play_context,
variable_manager=mock_var_manager,
all_vars=dict(),
)
# implicit meta: flush_handlers
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'meta')
self.assertEqual(task.args, dict(_raw_params='flush_handlers'))
# get the first task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg='this is the first task'))
# fail the host
itr.mark_host_failed(hosts[0])
# get the rescue task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg='this is the rescue task'))
# get the always task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg='this is the always task'))
# implicit meta: flush_handlers
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'meta')
self.assertEqual(task.args, dict(_raw_params='flush_handlers'))
# implicit meta: flush_handlers
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'meta')
self.assertEqual(task.args, dict(_raw_params='flush_handlers'))
# end of iteration
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNone(task)
def test_play_iterator_add_tasks(self):
fake_loader = DictDataLoader({
'test_play.yml': """
- hosts: all
gather_facts: no
tasks:
- debug: msg="dummy task"
""",
})
mock_var_manager = MagicMock()
mock_var_manager._fact_cache = dict()
mock_var_manager.get_vars.return_value = dict()
p = Playbook.load('test_play.yml', loader=fake_loader, variable_manager=mock_var_manager)
hosts = []
for i in range(0, 10):
host = MagicMock()
host.name = host.get_name.return_value = 'host%02d' % i
hosts.append(host)
inventory = MagicMock()
inventory.get_hosts.return_value = hosts
inventory.filter_hosts.return_value = hosts
play_context = PlayContext(play=p._entries[0])
itr = PlayIterator(
inventory=inventory,
play=p._entries[0],
play_context=play_context,
variable_manager=mock_var_manager,
all_vars=dict(),
)
# test the high-level add_tasks() method
s = HostState(blocks=[0,1,2])
itr._insert_tasks_into_state = MagicMock(return_value=s)
itr.add_tasks(hosts[0], [3,4,5])
self.assertEqual(itr._host_states[hosts[0].name], s)
# now actually test the lower-level method that does the work
itr = PlayIterator(
inventory=inventory,
play=p._entries[0],
play_context=play_context,
variable_manager=mock_var_manager,
all_vars=dict(),
)
# iterate past first task
_, task = itr.get_next_task_for_host(hosts[0])
while(task and task.action != 'debug'):
_, task = itr.get_next_task_for_host(hosts[0])
if task is None:
raise Exception("iterated past end of play while looking for place to insert tasks")
# get the current host state and copy it so we can mutate it
s = itr.get_host_state(hosts[0])
s_copy = s.copy()
# verify that with an empty task list, or if we're in a failed state, we simply return the state as-is
res_state = itr._insert_tasks_into_state(s_copy, task_list=[])
self.assertEqual(res_state, s_copy)
s_copy.fail_state = itr.FAILED_TASKS
res_state = itr._insert_tasks_into_state(s_copy, task_list=[MagicMock()])
self.assertEqual(res_state, s_copy)
# but if we've failed with a rescue/always block
mock_task = MagicMock()
s_copy.run_state = itr.ITERATING_RESCUE
res_state = itr._insert_tasks_into_state(s_copy, task_list=[mock_task])
self.assertEqual(res_state, s_copy)
self.assertIn(mock_task, res_state._blocks[res_state.cur_block].rescue)
itr._host_states[hosts[0].name] = res_state
(next_state, next_task) = itr.get_next_task_for_host(hosts[0], peek=True)
self.assertEqual(next_task, mock_task)
itr._host_states[hosts[0].name] = s
# test a regular insertion
s_copy = s.copy()
res_state = itr._insert_tasks_into_state(s_copy, task_list=[MagicMock()])

@ -0,0 +1,130 @@
# (c) 2016, James Cammarata <jimi@sngx.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.executor.task_result import TaskResult
class TestTaskResult(unittest.TestCase):
def test_task_result_basic(self):
mock_host = MagicMock()
mock_task = MagicMock()
# test loading a result with a dict
tr = TaskResult(mock_host, mock_task, dict())
# test loading a result with a JSON string
with patch('ansible.parsing.dataloader.DataLoader.load') as p:
tr = TaskResult(mock_host, mock_task, '{}')
def test_task_result_is_changed(self):
mock_host = MagicMock()
mock_task = MagicMock()
# test with no changed in result
tr = TaskResult(mock_host, mock_task, dict())
self.assertFalse(tr.is_changed())
# test with changed in the result
tr = TaskResult(mock_host, mock_task, dict(changed=True))
self.assertTrue(tr.is_changed())
# test with multiple results but none changed
mock_task.loop = 'foo'
tr = TaskResult(mock_host, mock_task, dict(results=[dict(foo='bar'), dict(bam='baz'), True]))
self.assertFalse(tr.is_changed())
# test with multiple results and one changed
mock_task.loop = 'foo'
tr = TaskResult(mock_host, mock_task, dict(results=[dict(changed=False), dict(changed=True), dict(some_key=False)]))
self.assertTrue(tr.is_changed())
def test_task_result_is_skipped(self):
mock_host = MagicMock()
mock_task = MagicMock()
# test with no skipped in result
tr = TaskResult(mock_host, mock_task, dict())
self.assertFalse(tr.is_skipped())
# test with skipped in the result
tr = TaskResult(mock_host, mock_task, dict(skipped=True))
self.assertTrue(tr.is_skipped())
# test with multiple results but none skipped
mock_task.loop = 'foo'
tr = TaskResult(mock_host, mock_task, dict(results=[dict(foo='bar'), dict(bam='baz'), True]))
self.assertFalse(tr.is_skipped())
# test with multiple results and one skipped
mock_task.loop = 'foo'
tr = TaskResult(mock_host, mock_task, dict(results=[dict(skipped=False), dict(skipped=True), dict(some_key=False)]))
self.assertFalse(tr.is_skipped())
# test with multiple results and all skipped
mock_task.loop = 'foo'
tr = TaskResult(mock_host, mock_task, dict(results=[dict(skipped=True), dict(skipped=True), dict(skipped=True)]))
self.assertTrue(tr.is_skipped())
def test_task_result_is_unreachable(self):
mock_host = MagicMock()
mock_task = MagicMock()
# test with no unreachable in result
tr = TaskResult(mock_host, mock_task, dict())
self.assertFalse(tr.is_unreachable())
# test with unreachable in the result
tr = TaskResult(mock_host, mock_task, dict(unreachable=True))
self.assertTrue(tr.is_unreachable())
# test with multiple results but none unreachable
mock_task.loop = 'foo'
tr = TaskResult(mock_host, mock_task, dict(results=[dict(foo='bar'), dict(bam='baz'), True]))
self.assertFalse(tr.is_unreachable())
# test with multiple results and one unreachable
mock_task.loop = 'foo'
tr = TaskResult(mock_host, mock_task, dict(results=[dict(unreachable=False), dict(unreachable=True), dict(some_key=False)]))
self.assertTrue(tr.is_unreachable())
def test_task_result_is_failed(self):
mock_host = MagicMock()
mock_task = MagicMock()
# test with no failed in result
tr = TaskResult(mock_host, mock_task, dict())
self.assertFalse(tr.is_failed())
# test failed result with rc values
tr = TaskResult(mock_host, mock_task, dict(rc=0))
self.assertFalse(tr.is_failed())
tr = TaskResult(mock_host, mock_task, dict(rc=1))
self.assertTrue(tr.is_failed())
# test with failed in result
tr = TaskResult(mock_host, mock_task, dict(failed=True))
self.assertTrue(tr.is_failed())
# test with failed_when in result
tr = TaskResult(mock_host, mock_task, dict(failed_when_result=True))
self.assertTrue(tr.is_failed())

@ -0,0 +1,58 @@
# -*- coding: utf-8 -*-
# (c) 2016, James Cammarata <jimi@sngx.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
import json
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import MagicMock
class TestModuleUtilsBasic(unittest.TestCase):
def test_module_utils_basic__log_invocation(self):
from ansible.module_utils import basic
# test basic log invocation
basic.MODULE_COMPLEX_ARGS = json.dumps(dict(foo=False, bar=[1,2,3], bam="bam", baz=u'baz'))
am = basic.AnsibleModule(
argument_spec=dict(
foo = dict(default=True, type='bool'),
bar = dict(default=[], type='list'),
bam = dict(default="bam"),
baz = dict(default=u"baz"),
password = dict(default=True),
no_log = dict(default="you shouldn't see me", no_log=True),
),
)
am.log = MagicMock()
am._log_invocation()
am.log.assert_called_with(
'Invoked with bam=bam bar=[1, 2, 3] foo=False baz=baz no_log=NOT_LOGGING_PARAMETER password=NOT_LOGGING_PASSWORD ',
log_args={
'foo': 'False',
'bar': '[1, 2, 3]',
'bam': 'bam',
'baz': 'baz',
'password': 'NOT_LOGGING_PASSWORD',
'no_log': 'NOT_LOGGING_PARAMETER',
},
)

@ -23,9 +23,9 @@ __metaclass__ = type
import copy import copy
import json import json
import sys import sys
from io import BytesIO
from ansible.compat.tests import unittest from ansible.compat.tests import unittest
from ansible.compat.six import StringIO
from ansible.module_utils import basic from ansible.module_utils import basic
from ansible.module_utils.basic import heuristic_log_sanitize from ansible.module_utils.basic import heuristic_log_sanitize
@ -41,7 +41,7 @@ class TestAnsibleModuleExitJson(unittest.TestCase):
basic.MODULE_COMPLEX_ARGS = '{}' basic.MODULE_COMPLEX_ARGS = '{}'
self.old_stdout = sys.stdout self.old_stdout = sys.stdout
self.fake_stream = StringIO() self.fake_stream = BytesIO()
sys.stdout = self.fake_stream sys.stdout = self.fake_stream
self.module = basic.AnsibleModule(argument_spec=dict()) self.module = basic.AnsibleModule(argument_spec=dict())
@ -127,7 +127,7 @@ class TestAnsibleModuleExitValuesRemoved(unittest.TestCase):
def test_exit_json_removes_values(self): def test_exit_json_removes_values(self):
self.maxDiff = None self.maxDiff = None
for args, return_val, expected in self.dataset: for args, return_val, expected in self.dataset:
sys.stdout = StringIO() sys.stdout = BytesIO()
basic.MODULE_COMPLEX_ARGS = json.dumps(args) basic.MODULE_COMPLEX_ARGS = json.dumps(args)
module = basic.AnsibleModule( module = basic.AnsibleModule(
argument_spec = dict( argument_spec = dict(
@ -146,7 +146,7 @@ class TestAnsibleModuleExitValuesRemoved(unittest.TestCase):
expected = copy.deepcopy(expected) expected = copy.deepcopy(expected)
del expected['changed'] del expected['changed']
expected['failed'] = True expected['failed'] = True
sys.stdout = StringIO() sys.stdout = BytesIO()
basic.MODULE_COMPLEX_ARGS = json.dumps(args) basic.MODULE_COMPLEX_ARGS = json.dumps(args)
module = basic.AnsibleModule( module = basic.AnsibleModule(
argument_spec = dict( argument_spec = dict(

@ -22,16 +22,16 @@ __metaclass__ = type
import errno import errno
import sys import sys
import time import time
from io import BytesIO
from ansible.compat.tests import unittest from ansible.compat.tests import unittest
from ansible.compat.six import StringIO, BytesIO
from ansible.compat.tests.mock import call, MagicMock, Mock, patch, sentinel from ansible.compat.tests.mock import call, MagicMock, Mock, patch, sentinel
from ansible.module_utils import basic from ansible.module_utils import basic
from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.basic import AnsibleModule
class OpenStringIO(StringIO): class OpenBytesIO(BytesIO):
"""StringIO with dummy close() method """BytesIO with dummy close() method
So that you can inspect the content after close() was called. So that you can inspect the content after close() was called.
""" """
@ -77,7 +77,7 @@ class TestAnsibleModuleRunCommand(unittest.TestCase):
self.subprocess = patch('ansible.module_utils.basic.subprocess').start() self.subprocess = patch('ansible.module_utils.basic.subprocess').start()
self.cmd = Mock() self.cmd = Mock()
self.cmd.returncode = 0 self.cmd.returncode = 0
self.cmd.stdin = OpenStringIO() self.cmd.stdin = OpenBytesIO()
self.cmd.stdout.fileno.return_value = sentinel.stdout self.cmd.stdout.fileno.return_value = sentinel.stdout
self.cmd.stderr.fileno.return_value = sentinel.stderr self.cmd.stderr.fileno.return_value = sentinel.stderr
self.subprocess.Popen.return_value = self.cmd self.subprocess.Popen.return_value = self.cmd

@ -0,0 +1,64 @@
# -*- coding: utf-8 -*-
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
from ansible.compat.tests import unittest
class TestAnsibleModuleExitJson(unittest.TestCase):
def test_module_utils_basic_safe_eval(self):
from ansible.module_utils import basic
basic.MODULE_COMPLEX_ARGS = '{}'
am = basic.AnsibleModule(
argument_spec=dict(),
)
# test some basic usage
# string (and with exceptions included), integer, bool
self.assertEqual(am.safe_eval("'a'"), 'a')
self.assertEqual(am.safe_eval("'a'", include_exceptions=True), ('a', None))
self.assertEqual(am.safe_eval("1"), 1)
self.assertEqual(am.safe_eval("True"), True)
self.assertEqual(am.safe_eval("False"), False)
self.assertEqual(am.safe_eval("{}"), {})
# not passing in a string to convert
self.assertEqual(am.safe_eval({'a':1}), {'a':1})
self.assertEqual(am.safe_eval({'a':1}, include_exceptions=True), ({'a':1}, None))
# invalid literal eval
self.assertEqual(am.safe_eval("a=1"), "a=1")
res = am.safe_eval("a=1", include_exceptions=True)
self.assertEqual(res[0], "a=1")
self.assertEqual(type(res[1]), SyntaxError)
self.assertEqual(am.safe_eval("a.foo()"), "a.foo()")
res = am.safe_eval("a.foo()", include_exceptions=True)
self.assertEqual(res[0], "a.foo()")
self.assertEqual(res[1], None)
self.assertEqual(am.safe_eval("import foo"), "import foo")
res = am.safe_eval("import foo", include_exceptions=True)
self.assertEqual(res[0], "import foo")
self.assertEqual(res[1], None)
self.assertEqual(am.safe_eval("__import__('foo')"), "__import__('foo')")
res = am.safe_eval("__import__('foo')", include_exceptions=True)
self.assertEqual(res[0], "__import__('foo')")
self.assertEqual(type(res[1]), ValueError)

@ -21,12 +21,18 @@ from __future__ import (absolute_import, division)
__metaclass__ = type __metaclass__ = type
import errno import errno
import os
import sys import sys
from six.moves import builtins try:
import builtins
except ImportError:
import __builtin__ as builtins
from ansible.compat.tests import unittest from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock, mock_open, Mock from ansible.compat.tests.mock import patch, MagicMock, mock_open, Mock, call
realimport = builtins.__import__
class TestModuleUtilsBasic(unittest.TestCase): class TestModuleUtilsBasic(unittest.TestCase):
@ -36,17 +42,114 @@ class TestModuleUtilsBasic(unittest.TestCase):
def tearDown(self): def tearDown(self):
pass pass
def test_module_utils_basic_imports(self): def clear_modules(self, mods):
realimport = builtins.__import__ for mod in mods:
if mod in sys.modules:
del sys.modules[mod]
@patch.object(builtins, '__import__')
def test_module_utils_basic_import_syslog(self, mock_import):
def _mock_import(name, *args, **kwargs):
if name == 'syslog':
raise ImportError
return realimport(name, *args, **kwargs)
self.clear_modules(['syslog', 'ansible.module_utils.basic'])
mod = builtins.__import__('ansible.module_utils.basic')
self.assertTrue(mod.module_utils.basic.HAS_SYSLOG)
self.clear_modules(['syslog', 'ansible.module_utils.basic'])
mock_import.side_effect = _mock_import
mod = builtins.__import__('ansible.module_utils.basic')
self.assertFalse(mod.module_utils.basic.HAS_SYSLOG)
@patch.object(builtins, '__import__')
def test_module_utils_basic_import_selinux(self, mock_import):
def _mock_import(name, *args, **kwargs):
if name == 'selinux':
raise ImportError
return realimport(name, *args, **kwargs)
try:
self.clear_modules(['selinux', 'ansible.module_utils.basic'])
mod = builtins.__import__('ansible.module_utils.basic')
self.assertTrue(mod.module_utils.basic.HAVE_SELINUX)
except ImportError:
# no selinux on test system, so skip
pass
self.clear_modules(['selinux', 'ansible.module_utils.basic'])
mock_import.side_effect = _mock_import
mod = builtins.__import__('ansible.module_utils.basic')
self.assertFalse(mod.module_utils.basic.HAVE_SELINUX)
@patch.object(builtins, '__import__')
def test_module_utils_basic_import_json(self, mock_import):
def _mock_import(name, *args, **kwargs): def _mock_import(name, *args, **kwargs):
if name == 'json': if name == 'json':
raise ImportError() raise ImportError
realimport(name, *args, **kwargs) elif name == 'simplejson':
return MagicMock()
return realimport(name, *args, **kwargs)
with patch.object(builtins, '__import__', _mock_import, create=True) as m: self.clear_modules(['json', 'ansible.module_utils.basic'])
m('ansible.module_utils.basic') mod = builtins.__import__('ansible.module_utils.basic')
builtins.__import__('ansible.module_utils.basic')
self.clear_modules(['json', 'ansible.module_utils.basic'])
mock_import.side_effect = _mock_import
mod = builtins.__import__('ansible.module_utils.basic')
# FIXME: doesn't work yet
#@patch.object(builtins, 'bytes')
#def test_module_utils_basic_bytes(self, mock_bytes):
# mock_bytes.side_effect = NameError()
# from ansible.module_utils import basic
@patch.object(builtins, '__import__')
@unittest.skipIf(sys.version_info[0] >= 3, "Python 3 is not supported on targets (yet)")
def test_module_utils_basic_import_literal_eval(self, mock_import):
def _mock_import(name, *args, **kwargs):
try:
fromlist = kwargs.get('fromlist', args[2])
except IndexError:
fromlist = []
if name == 'ast' and 'literal_eval' in fromlist:
raise ImportError
return realimport(name, *args, **kwargs)
mock_import.side_effect = _mock_import
self.clear_modules(['ast', 'ansible.module_utils.basic'])
mod = builtins.__import__('ansible.module_utils.basic')
self.assertEqual(mod.module_utils.basic.literal_eval("'1'"), "1")
self.assertEqual(mod.module_utils.basic.literal_eval("1"), 1)
self.assertEqual(mod.module_utils.basic.literal_eval("-1"), -1)
self.assertEqual(mod.module_utils.basic.literal_eval("(1,2,3)"), (1,2,3))
self.assertEqual(mod.module_utils.basic.literal_eval("[1]"), [1])
self.assertEqual(mod.module_utils.basic.literal_eval("True"), True)
self.assertEqual(mod.module_utils.basic.literal_eval("False"), False)
self.assertEqual(mod.module_utils.basic.literal_eval("None"), None)
#self.assertEqual(mod.module_utils.basic.literal_eval('{"a": 1}'), dict(a=1))
self.assertRaises(ValueError, mod.module_utils.basic.literal_eval, "asdfasdfasdf")
@patch.object(builtins, '__import__')
def test_module_utils_basic_import_systemd_journal(self, mock_import):
def _mock_import(name, *args, **kwargs):
try:
fromlist = kwargs.get('fromlist', args[2])
except IndexError:
fromlist = []
if name == 'systemd' and 'journal' in fromlist:
raise ImportError
return realimport(name, *args, **kwargs)
self.clear_modules(['systemd', 'ansible.module_utils.basic'])
mod = builtins.__import__('ansible.module_utils.basic')
self.assertTrue(mod.module_utils.basic.has_journal)
self.clear_modules(['systemd', 'ansible.module_utils.basic'])
mock_import.side_effect = _mock_import
mod = builtins.__import__('ansible.module_utils.basic')
self.assertFalse(mod.module_utils.basic.has_journal)
def test_module_utils_basic_get_platform(self): def test_module_utils_basic_get_platform(self):
with patch('platform.system', return_value='foo'): with patch('platform.system', return_value='foo'):
@ -60,19 +163,19 @@ class TestModuleUtilsBasic(unittest.TestCase):
self.assertEqual(get_distribution(), None) self.assertEqual(get_distribution(), None)
with patch('platform.system', return_value='Linux'): with patch('platform.system', return_value='Linux'):
with patch('platform.linux_distribution', return_value=("foo", "1", "One")): with patch('platform.linux_distribution', return_value=["foo"]):
self.assertEqual(get_distribution(), "Foo") self.assertEqual(get_distribution(), "Foo")
with patch('os.path.isfile', return_value=True): with patch('os.path.isfile', return_value=True):
def _dist(distname='', version='', id='', supported_dists=(), full_distribution_name=1): with patch('platform.linux_distribution', side_effect=[("AmazonFooBar",)]):
if supported_dists != ():
return ("AmazonFooBar", "", "")
else:
return ("", "", "")
with patch('platform.linux_distribution', side_effect=_dist):
self.assertEqual(get_distribution(), "Amazonfoobar") self.assertEqual(get_distribution(), "Amazonfoobar")
with patch('platform.linux_distribution', side_effect=(("",), ("AmazonFooBam",))):
self.assertEqual(get_distribution(), "Amazon")
with patch('platform.linux_distribution', side_effect=[("",),("",)]):
self.assertEqual(get_distribution(), "OtherLinux")
def _dist(distname='', version='', id='', supported_dists=(), full_distribution_name=1): def _dist(distname='', version='', id='', supported_dists=(), full_distribution_name=1):
if supported_dists != (): if supported_dists != ():
return ("Bar", "2", "Two") return ("Bar", "2", "Two")
@ -678,17 +781,269 @@ class TestModuleUtilsBasic(unittest.TestCase):
self.assertEqual(am.set_mode_if_different('/path/to/file', 0o660, False), True) self.assertEqual(am.set_mode_if_different('/path/to/file', 0o660, False), True)
am.check_mode = False am.check_mode = False
original_hasattr = hasattr
def _hasattr(obj, name):
if obj == os and name == 'lchmod':
return False
return original_hasattr(obj, name)
# FIXME: this isn't working yet # FIXME: this isn't working yet
#with patch('os.lstat', side_effect=[mock_stat1, mock_stat2]): with patch('os.lstat', side_effect=[mock_stat1, mock_stat2]):
# with patch('os.lchmod', return_value=None) as m_os: with patch.object(builtins, 'hasattr', side_effect=_hasattr):
# del m_os.lchmod with patch('os.path.islink', return_value=False):
# with patch('os.path.islink', return_value=False): with patch('os.chmod', return_value=None) as m_chmod:
# with patch('os.chmod', return_value=None) as m_chmod: self.assertEqual(am.set_mode_if_different('/path/to/file/no_lchmod', 0o660, False), True)
# self.assertEqual(am.set_mode_if_different('/path/to/file/no_lchmod', 0o660, False), True) with patch('os.lstat', side_effect=[mock_stat1, mock_stat2]):
# m_chmod.assert_called_with('/path/to/file', 0o660) with patch.object(builtins, 'hasattr', side_effect=_hasattr):
# with patch('os.path.islink', return_value=True): with patch('os.path.islink', return_value=True):
# with patch('os.chmod', return_value=None) as m_chmod: with patch('os.chmod', return_value=None) as m_chmod:
# with patch('os.stat', return_value=mock_stat2): with patch('os.stat', return_value=mock_stat2):
# self.assertEqual(am.set_mode_if_different('/path/to/file', 0o660, False), True) self.assertEqual(am.set_mode_if_different('/path/to/file', 0o660, False), True)
# m_chmod.assert_called_with('/path/to/file', 0o660)
@patch('tempfile.NamedTemporaryFile')
@patch('os.umask')
@patch('shutil.copyfileobj')
@patch('shutil.move')
@patch('shutil.copy2')
@patch('os.rename')
@patch('pwd.getpwuid')
@patch('os.getuid')
@patch('os.environ')
@patch('os.getlogin')
@patch('os.chown')
@patch('os.chmod')
@patch('os.stat')
@patch('os.path.exists')
def test_module_utils_basic_ansible_module_atomic_move(
self,
_os_path_exists,
_os_stat,
_os_chmod,
_os_chown,
_os_getlogin,
_os_environ,
_os_getuid,
_pwd_getpwuid,
_os_rename,
_shutil_copy2,
_shutil_move,
_shutil_copyfileobj,
_os_umask,
_tempfile_NamedTemporaryFile,
):
from ansible.module_utils import basic
basic.MODULE_COMPLEX_ARGS = '{}'
am = basic.AnsibleModule(
argument_spec = dict(),
)
environ = dict()
_os_environ.__getitem__ = environ.__getitem__
_os_environ.__setitem__ = environ.__setitem__
am.selinux_enabled = MagicMock()
am.selinux_context = MagicMock()
am.selinux_default_context = MagicMock()
am.set_context_if_different = MagicMock()
# test destination does not exist, no selinux, login name = 'root',
# no environment, os.rename() succeeds
_os_path_exists.side_effect = [False, False]
_os_getlogin.return_value = 'root'
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_rename.return_value = None
_os_umask.side_effect = [18, 0]
am.selinux_enabled.return_value = False
_os_chmod.reset_mock()
_os_chown.reset_mock()
am.set_context_if_different.reset_mock()
am.atomic_move('/path/to/src', '/path/to/dest')
_os_rename.assert_called_with('/path/to/src', '/path/to/dest')
self.assertEqual(_os_chmod.call_args_list, [call('/path/to/dest', basic.DEFAULT_PERM & ~18)])
# same as above, except selinux_enabled
_os_path_exists.side_effect = [False, False]
_os_getlogin.return_value = 'root'
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_rename.return_value = None
_os_umask.side_effect = [18, 0]
mock_context = MagicMock()
am.selinux_default_context.return_value = mock_context
am.selinux_enabled.return_value = True
_os_chmod.reset_mock()
_os_chown.reset_mock()
am.set_context_if_different.reset_mock()
am.selinux_default_context.reset_mock()
am.atomic_move('/path/to/src', '/path/to/dest')
_os_rename.assert_called_with('/path/to/src', '/path/to/dest')
self.assertEqual(_os_chmod.call_args_list, [call('/path/to/dest', basic.DEFAULT_PERM & ~18)])
self.assertEqual(am.selinux_default_context.call_args_list, [call('/path/to/dest')])
self.assertEqual(am.set_context_if_different.call_args_list, [call('/path/to/dest', mock_context, False)])
# now with dest present, no selinux, also raise OSError when using
# os.getlogin() to test corner case with no tty
_os_path_exists.side_effect = [True, True]
_os_getlogin.side_effect = OSError()
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_rename.return_value = None
_os_umask.side_effect = [18, 0]
environ['LOGNAME'] = 'root'
stat1 = MagicMock()
stat1.st_mode = 0o0644
stat1.st_uid = 0
stat1.st_gid = 0
_os_stat.side_effect = [stat1,]
am.selinux_enabled.return_value = False
_os_chmod.reset_mock()
_os_chown.reset_mock()
am.set_context_if_different.reset_mock()
am.atomic_move('/path/to/src', '/path/to/dest')
_os_rename.assert_called_with('/path/to/src', '/path/to/dest')
# dest present, selinux enabled
_os_path_exists.side_effect = [True, True]
_os_getlogin.return_value = 'root'
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_rename.return_value = None
_os_umask.side_effect = [18, 0]
stat1 = MagicMock()
stat1.st_mode = 0o0644
stat1.st_uid = 0
stat1.st_gid = 0
_os_stat.side_effect = [stat1,]
mock_context = MagicMock()
am.selinux_context.return_value = mock_context
am.selinux_enabled.return_value = True
_os_chmod.reset_mock()
_os_chown.reset_mock()
am.set_context_if_different.reset_mock()
am.selinux_default_context.reset_mock()
am.atomic_move('/path/to/src', '/path/to/dest')
_os_rename.assert_called_with('/path/to/src', '/path/to/dest')
self.assertEqual(am.selinux_context.call_args_list, [call('/path/to/dest')])
self.assertEqual(am.set_context_if_different.call_args_list, [call('/path/to/dest', mock_context, False)])
# now testing with exceptions raised
# have os.stat raise OSError which is not EPERM
_os_stat.side_effect = OSError()
_os_path_exists.side_effect = [True, True]
_os_getlogin.return_value = 'root'
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_rename.return_value = None
_os_umask.side_effect = [18, 0]
self.assertRaises(OSError, am.atomic_move, '/path/to/src', '/path/to/dest')
# and now have os.stat return EPERM, which should not fail
_os_stat.side_effect = OSError(errno.EPERM, 'testing os stat with EPERM')
_os_path_exists.side_effect = [True, True]
_os_getlogin.return_value = 'root'
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_rename.return_value = None
_os_umask.side_effect = [18, 0]
# FIXME: we don't assert anything here yet
am.atomic_move('/path/to/src', '/path/to/dest')
# now we test os.rename() raising errors...
# first we test with a bad errno to verify it bombs out
_os_path_exists.side_effect = [False, False]
_os_getlogin.return_value = 'root'
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_umask.side_effect = [18, 0]
_os_rename.side_effect = OSError(errno.EIO, 'failing with EIO')
self.assertRaises(SystemExit, am.atomic_move, '/path/to/src', '/path/to/dest')
# next we test with EPERM so it continues to the alternate code for moving
# test with NamedTemporaryFile raising an error first
_os_path_exists.side_effect = [False, False]
_os_getlogin.return_value = 'root'
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_umask.side_effect = [18, 0]
_os_rename.side_effect = [OSError(errno.EPERM, 'failing with EPERM'), None]
_tempfile_NamedTemporaryFile.return_value = None
_tempfile_NamedTemporaryFile.side_effect = OSError()
am.selinux_enabled.return_value = False
self.assertRaises(SystemExit, am.atomic_move, '/path/to/src', '/path/to/dest')
# then test with it creating a temp file
_os_path_exists.side_effect = [False, False]
_os_getlogin.return_value = 'root'
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_umask.side_effect = [18, 0]
_os_rename.side_effect = [OSError(errno.EPERM, 'failing with EPERM'), None]
mock_stat1 = MagicMock()
mock_stat2 = MagicMock()
mock_stat3 = MagicMock()
_os_stat.return_value = [mock_stat1, mock_stat2, mock_stat3]
_os_stat.side_effect = None
mock_tempfile = MagicMock()
mock_tempfile.name = '/path/to/tempfile'
_tempfile_NamedTemporaryFile.return_value = mock_tempfile
_tempfile_NamedTemporaryFile.side_effect = None
am.selinux_enabled.return_value = False
# FIXME: we don't assert anything here yet
am.atomic_move('/path/to/src', '/path/to/dest')
# same as above, but with selinux enabled
_os_path_exists.side_effect = [False, False]
_os_getlogin.return_value = 'root'
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_umask.side_effect = [18, 0]
_os_rename.side_effect = [OSError(errno.EPERM, 'failing with EPERM'), None]
mock_tempfile = MagicMock()
_tempfile_NamedTemporaryFile.return_value = mock_tempfile
mock_context = MagicMock()
am.selinux_default_context.return_value = mock_context
am.selinux_enabled.return_value = True
am.atomic_move('/path/to/src', '/path/to/dest')
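The EPERM branches above exercise the fallback path in atomic_move(): when a plain os.rename() is refused, the file is staged through a NamedTemporaryFile created next to the destination and that temp file is moved into place instead of bailing out. A minimal sketch of that pattern, assuming a plain copy is acceptable (illustrative only; the real method also restores owner, mode and SELinux context and reports failures via fail_json()):

import errno
import os
import shutil
import tempfile

def atomic_move_sketch(src, dest):
    # Illustrative only -- not AnsibleModule.atomic_move() itself.
    try:
        os.rename(src, dest)
    except OSError as e:
        if e.errno != errno.EPERM:
            # anything else (e.g. the EIO case above) is fatal
            raise
        # some filesystems refuse rename() outright; stage a copy beside the
        # destination and rename that into place so the swap stays atomic
        tmp = tempfile.NamedTemporaryFile(dir=os.path.dirname(dest) or '.', delete=False)
        tmp.close()
        shutil.copy2(src, tmp.name)
        os.rename(tmp.name, dest)
        os.unlink(src)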
def test_module_utils_basic_ansible_module__symbolic_mode_to_octal(self):
from ansible.module_utils import basic
basic.MODULE_COMPLEX_ARGS = '{}'
am = basic.AnsibleModule(
argument_spec = dict(),
)
mock_stat = MagicMock()
# FIXME: trying many more combinations here would be good
# directory, give full perms to all, then one group at a time
mock_stat.st_mode = 0o040000
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'a+rwx'), 0o0777)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'u+rwx,g+rwx,o+rwx'), 0o0777)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'o+rwx'), 0o0007)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'g+rwx'), 0o0070)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'u+rwx'), 0o0700)
# same as above, but in reverse so removing permissions
mock_stat.st_mode = 0o040777
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'a-rwx'), 0o0000)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'u-rwx,g-rwx,o-rwx'), 0o0000)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'o-rwx'), 0o0770)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'g-rwx'), 0o0707)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'u-rwx'), 0o0077)
# now using absolute assignment
mock_stat.st_mode = 0o040000
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'a=rwx'), 0o0777)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'u=rwx,g=rwx,o=rwx'), 0o0777)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'o=rwx'), 0o0007)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'g=rwx'), 0o0070)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'u=rwx'), 0o0700)
# invalid modes
mock_stat.st_mode = 0o0400000
self.assertRaises(ValueError, am._symbolic_mode_to_octal, mock_stat, 'a=foo')
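For reference, the chmod-style clauses asserted above decompose into a "who", an operator and permission bits. A simplified sketch that reproduces the expected values (hypothetical helper, not the _symbolic_mode_to_octal implementation; it ignores X/s/t bits and multi-target clauses):

USER_SHIFT = {'u': 6, 'g': 3, 'o': 0}
PERM_BITS = {'r': 4, 'w': 2, 'x': 1}

def apply_clause(mode, clause):
    # clause looks like 'u+rwx', 'g-rwx' or 'a=rwx'
    who, op, perms = clause[0], clause[1], clause[2:]
    if any(p not in PERM_BITS for p in perms):
        raise ValueError('unknown permission in %r' % clause)
    bits = sum(PERM_BITS[p] for p in perms)
    targets = ['u', 'g', 'o'] if who == 'a' else [who]
    for t in targets:
        shifted = bits << USER_SHIFT[t]
        if op == '+':
            mode |= shifted
        elif op == '-':
            mode &= ~shifted
        else:  # '='
            mode = (mode & ~(0o7 << USER_SHIFT[t])) | shifted
    return mode

assert apply_clause(0o000, 'a+rwx') == 0o777
assert apply_clause(0o777, 'g-rwx') == 0o707
assert apply_clause(0o000, 'u=rwx') == 0o700
assert apply_clause(0o000, 'o+rwx') == 0o007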

@ -20,8 +20,9 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from io import StringIO
from six import text_type, binary_type
from six.moves import StringIO
from collections import Sequence, Set, Mapping
from ansible.compat.tests import unittest
@ -35,6 +36,13 @@ except ImportError:
from yaml.parser import ParserError
class NameStringIO(StringIO):
"""In py2.6, StringIO doesn't let you set name because a baseclass has it
as readonly property"""
name = None
def __init__(self, *args, **kwargs):
super(NameStringIO, self).__init__(*args, **kwargs)
class TestAnsibleLoaderBasic(unittest.TestCase):
def setUp(self):
@ -44,7 +52,7 @@ class TestAnsibleLoaderBasic(unittest.TestCase):
pass
def test_parse_number(self):
stream = StringIO("""
stream = StringIO(u"""
1
""")
loader = AnsibleLoader(stream, 'myfile.yml')
@ -53,7 +61,7 @@ class TestAnsibleLoaderBasic(unittest.TestCase):
# No line/column info saved yet
def test_parse_string(self):
stream = StringIO("""
stream = StringIO(u"""
Ansible
""")
loader = AnsibleLoader(stream, 'myfile.yml')
@ -64,7 +72,7 @@ class TestAnsibleLoaderBasic(unittest.TestCase):
self.assertEqual(data.ansible_pos, ('myfile.yml', 2, 17))
def test_parse_utf8_string(self):
stream = StringIO("""
stream = StringIO(u"""
Cafè Eñyei
""")
loader = AnsibleLoader(stream, 'myfile.yml')
@ -75,7 +83,7 @@ class TestAnsibleLoaderBasic(unittest.TestCase):
self.assertEqual(data.ansible_pos, ('myfile.yml', 2, 17))
def test_parse_dict(self):
stream = StringIO("""
stream = StringIO(u"""
webster: daniel
oed: oxford
""")
@ -93,7 +101,7 @@ class TestAnsibleLoaderBasic(unittest.TestCase):
self.assertEqual(data[u'oed'].ansible_pos, ('myfile.yml', 3, 22))
def test_parse_list(self):
stream = StringIO("""
stream = StringIO(u"""
- a
- b
""")
@ -109,7 +117,7 @@ class TestAnsibleLoaderBasic(unittest.TestCase):
self.assertEqual(data[1].ansible_pos, ('myfile.yml', 3, 19))
def test_parse_short_dict(self):
stream = StringIO("""{"foo": "bar"}""")
stream = StringIO(u"""{"foo": "bar"}""")
loader = AnsibleLoader(stream, 'myfile.yml')
data = loader.get_single_data()
self.assertEqual(data, dict(foo=u'bar'))
@ -117,7 +125,7 @@ class TestAnsibleLoaderBasic(unittest.TestCase):
self.assertEqual(data.ansible_pos, ('myfile.yml', 1, 1))
self.assertEqual(data[u'foo'].ansible_pos, ('myfile.yml', 1, 9))
stream = StringIO("""foo: bar""")
stream = StringIO(u"""foo: bar""")
loader = AnsibleLoader(stream, 'myfile.yml')
data = loader.get_single_data()
self.assertEqual(data, dict(foo=u'bar'))
@ -126,12 +134,12 @@ class TestAnsibleLoaderBasic(unittest.TestCase):
self.assertEqual(data[u'foo'].ansible_pos, ('myfile.yml', 1, 6))
def test_error_conditions(self):
stream = StringIO("""{""")
stream = StringIO(u"""{""")
loader = AnsibleLoader(stream, 'myfile.yml')
self.assertRaises(ParserError, loader.get_single_data)
def test_front_matter(self):
stream = StringIO("""---\nfoo: bar""")
stream = StringIO(u"""---\nfoo: bar""")
loader = AnsibleLoader(stream, 'myfile.yml')
data = loader.get_single_data()
self.assertEqual(data, dict(foo=u'bar'))
@ -140,7 +148,7 @@ class TestAnsibleLoaderBasic(unittest.TestCase):
self.assertEqual(data[u'foo'].ansible_pos, ('myfile.yml', 2, 6))
# Initial indent (See: #6348)
stream = StringIO(""" - foo: bar\n baz: qux""")
stream = StringIO(u""" - foo: bar\n baz: qux""")
loader = AnsibleLoader(stream, 'myfile.yml')
data = loader.get_single_data()
self.assertEqual(data, [{u'foo': u'bar', u'baz': u'qux'}])
@ -154,7 +162,7 @@ class TestAnsibleLoaderBasic(unittest.TestCase):
class TestAnsibleLoaderPlay(unittest.TestCase):
def setUp(self):
stream = StringIO("""
stream = NameStringIO(u"""
- hosts: localhost
vars:
number: 1

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015, Florian Apolloner <florian@apolloner.eu>
#
@ -21,15 +21,128 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ast
import json
import pipes
import os
from sys import version_info
try:
import builtins
except ImportError:
import __builtin__ as builtins
from ansible import __version__ as ansible_version
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.compat.six import text_type
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock, Mock
from ansible.compat.tests.mock import patch, MagicMock, mock_open
from ansible.errors import AnsibleError
from ansible.playbook.play_context import PlayContext
from ansible.plugins import PluginLoader
from ansible.plugins.action import ActionBase
from ansible.template import Templar
from ansible.utils.unicode import to_bytes
from units.mock.loader import DictDataLoader
python_module_replacers = b"""
#!/usr/bin/python
#ANSIBLE_VERSION = "<<ANSIBLE_VERSION>>"
#MODULE_ARGS = "<<INCLUDE_ANSIBLE_MODULE_ARGS>>"
#MODULE_COMPLEX_ARGS = "<<INCLUDE_ANSIBLE_MODULE_COMPLEX_ARGS>>"
#SELINUX_SPECIAL_FS="<<SELINUX_SPECIAL_FILESYSTEMS>>"
test = u'Toshio \u304f\u3089\u3068\u307f'
from ansible.module_utils.basic import *
"""
powershell_module_replacers = b"""
WINDOWS_ARGS = "<<INCLUDE_ANSIBLE_MODULE_WINDOWS_ARGS>>"
# POWERSHELL_COMMON
"""
# Prior to 3.4.4, mock_open cannot handle binary read_data
if version_info >= (3,) and version_info < (3, 4, 4):
file_spec = None
def _iterate_read_data(read_data):
# Helper for mock_open:
# Retrieve lines from read_data via a generator so that separate calls to
# readline, read, and readlines are properly interleaved
sep = b'\n' if isinstance(read_data, bytes) else '\n'
data_as_list = [l + sep for l in read_data.split(sep)]
if data_as_list[-1] == sep:
# If the last line ended in a newline, the list comprehension will have an
# extra entry that's just a newline. Remove this.
data_as_list = data_as_list[:-1]
else:
# If there wasn't an extra newline by itself, then the file being
# emulated doesn't have a newline to end the last line, so remove the
# newline that our naive format() added
data_as_list[-1] = data_as_list[-1][:-1]
for line in data_as_list:
yield line
def mock_open(mock=None, read_data=''):
"""
A helper function to create a mock to replace the use of `open`. It works
for `open` called directly or used as a context manager.
The `mock` argument is the mock object to configure. If `None` (the
default) then a `MagicMock` will be created for you, with the API limited
to methods or attributes available on standard file handles.
`read_data` is a string for the `read`, `readline`, and `readlines` methods
of the file handle to return. This is an empty string by default.
"""
def _readlines_side_effect(*args, **kwargs):
if handle.readlines.return_value is not None:
return handle.readlines.return_value
return list(_data)
def _read_side_effect(*args, **kwargs):
if handle.read.return_value is not None:
return handle.read.return_value
return type(read_data)().join(_data)
def _readline_side_effect():
if handle.readline.return_value is not None:
while True:
yield handle.readline.return_value
for line in _data:
yield line
global file_spec
if file_spec is None:
import _io
file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))
if mock is None:
mock = MagicMock(name='open', spec=open)
handle = MagicMock(spec=file_spec)
handle.__enter__.return_value = handle
_data = _iterate_read_data(read_data)
handle.write.return_value = None
handle.read.return_value = None
handle.readline.return_value = None
handle.readlines.return_value = None
handle.read.side_effect = _read_side_effect
handle.readline.side_effect = _readline_side_effect()
handle.readlines.side_effect = _readlines_side_effect
mock.return_value = handle
return mock
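The backport above matters because, prior to Python 3.4.4, the stock mock_open() chokes on bytes read_data; these tests feed it raw module source as bytes and patch the built-in open() around the code under test. A small usage sketch with the names already imported in this file (the path is arbitrary since open() is mocked):

fake_source = b"#!/usr/bin/python\ntest = u'data'\n"
with patch.object(builtins, 'open', mock_open(read_data=fake_source)):
    with open('/fake/path/to/module.py', 'rb') as f:
        # read() hands back exactly the bytes we configured
        assert f.read() == fake_source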
class DerivedActionBase(ActionBase):
@ -41,11 +154,137 @@ class DerivedActionBase(ActionBase):
class TestActionBase(unittest.TestCase):
class DerivedActionBase(ActionBase):
def run(self, tmp=None, task_vars=None):
# We're not testing the plugin run() method, just the helper
# methods ActionBase defines
return dict()
def test_action_base_run(self):
mock_task = MagicMock()
mock_task.action = "foo"
mock_task.args = dict(a=1, b=2, c=3)
mock_connection = MagicMock()
play_context = PlayContext()
mock_task.async = None
action_base = DerivedActionBase(mock_task, mock_connection, play_context, None, None, None)
results = action_base.run()
self.assertEqual(results, dict())
mock_task.async = 0
action_base = DerivedActionBase(mock_task, mock_connection, play_context, None, None, None)
results = action_base.run()
self.assertEqual(results, dict(invocation=dict(module_name='foo', module_args=dict(a=1, b=2, c=3))))
def test_action_base__configure_module(self):
fake_loader = DictDataLoader({
})
# create our fake task
mock_task = MagicMock()
mock_task.action = "copy"
# create a mock connection, so we don't actually try and connect to things
mock_connection = MagicMock()
# create a mock shared loader object
def mock_find_plugin(name, options):
if name == 'badmodule':
return None
elif '.ps1' in options:
return '/fake/path/to/%s.ps1' % name
else:
return '/fake/path/to/%s' % name
mock_module_loader = MagicMock()
mock_module_loader.find_plugin.side_effect = mock_find_plugin
mock_shared_obj_loader = MagicMock()
mock_shared_obj_loader.module_loader = mock_module_loader
# we're using a real play context here
play_context = PlayContext()
# our test class
action_base = DerivedActionBase(
task=mock_task,
connection=mock_connection,
play_context=play_context,
loader=fake_loader,
templar=None,
shared_loader_obj=mock_shared_obj_loader,
)
# test python module formatting
with patch.object(builtins, 'open', mock_open(read_data=to_bytes(python_module_replacers.strip(), encoding='utf-8'))) as m:
mock_task.args = dict(a=1, foo='fö〩')
mock_connection.module_implementation_preferences = ('',)
(style, shebang, data) = action_base._configure_module(mock_task.action, mock_task.args)
self.assertEqual(style, "new")
self.assertEqual(shebang, b"#!/usr/bin/python")
# test module not found
self.assertRaises(AnsibleError, action_base._configure_module, 'badmodule', mock_task.args)
# test powershell module formatting
with patch.object(builtins, 'open', mock_open(read_data=to_bytes(powershell_module_replacers.strip(), encoding='utf-8'))) as m:
mock_task.action = 'win_copy'
mock_task.args = dict(b=2)
mock_connection.module_implementation_preferences = ('.ps1',)
(style, shebang, data) = action_base._configure_module('stat', mock_task.args)
self.assertEqual(style, "new")
self.assertEqual(shebang, None)
# test module not found
self.assertRaises(AnsibleError, action_base._configure_module, 'badmodule', mock_task.args)
def test_action_base__compute_environment_string(self):
fake_loader = DictDataLoader({
})
# create our fake task
mock_task = MagicMock()
mock_task.action = "copy"
mock_task.args = dict(a=1)
# create a mock connection, so we don't actually try and connect to things
def env_prefix(**args):
return ' '.join(['%s=%s' % (k, pipes.quote(text_type(v))) for k,v in args.items()])
mock_connection = MagicMock()
mock_connection._shell.env_prefix.side_effect = env_prefix
# we're using a real play context here
play_context = PlayContext()
# and we're using a real templar here too
templar = Templar(loader=fake_loader)
# our test class
action_base = DerivedActionBase(
task=mock_task,
connection=mock_connection,
play_context=play_context,
loader=fake_loader,
templar=templar,
shared_loader_obj=None,
)
# test standard environment setup
mock_task.environment = [dict(FOO='foo'), None]
env_string = action_base._compute_environment_string()
self.assertEqual(env_string, "FOO=foo")
# test where environment is not a list
mock_task.environment = dict(FOO='foo')
env_string = action_base._compute_environment_string()
self.assertEqual(env_string, "FOO=foo")
# test environment with a variable in it
templar.set_available_variables(variables=dict(the_var='bar'))
mock_task.environment = [dict(FOO='{{the_var}}')]
env_string = action_base._compute_environment_string()
self.assertEqual(env_string, "FOO=bar")
# test with a bad environment set
mock_task.environment = dict(FOO='foo')
mock_task.environment = ['hi there']
self.assertRaises(AnsibleError, action_base._compute_environment_string)
def test_action_base__early_needs_tmp_path(self):
# create our fake task
@ -240,8 +479,8 @@ class TestActionBase(unittest.TestCase):
self.assertEqual(action_base._transfer_data('/path/to/remote/file', 'some data'), '/path/to/remote/file')
self.assertEqual(action_base._transfer_data('/path/to/remote/file', 'some mixed data: fö〩'), '/path/to/remote/file')
self.assertEqual(action_base._transfer_data('/path/to/remote/file', dict(some_key=u'some value')), '/path/to/remote/file')
self.assertEqual(action_base._transfer_data('/path/to/remote/file', dict(some_key='some value')), '/path/to/remote/file')
self.assertEqual(action_base._transfer_data('/path/to/remote/file', dict(some_key=u'fö〩')), '/path/to/remote/file')
self.assertEqual(action_base._transfer_data('/path/to/remote/file', dict(some_key='fö〩')), '/path/to/remote/file')
mock_afo.write.side_effect = Exception()
self.assertRaises(AnsibleError, action_base._transfer_data, '/path/to/remote/file', '')
@ -367,19 +606,19 @@ class TestActionBase(unittest.TestCase):
def test_action_base_sudo_only_if_user_differs(self):
play_context = PlayContext()
action_base = self.DerivedActionBase(None, None, play_context, None, None, None)
action_base = DerivedActionBase(None, None, play_context, None, None, None)
action_base._connection = Mock(exec_command=Mock(return_value=(0, '', '')))
action_base._connection = MagicMock(exec_command=MagicMock(return_value=(0, '', '')))
play_context.become = True
play_context.become_user = play_context.remote_user = 'root'
play_context.make_become_cmd = Mock(return_value='CMD')
play_context.make_become_cmd = MagicMock(return_value='CMD')
action_base._low_level_execute_command('ECHO', sudoable=True)
play_context.make_become_cmd.assert_not_called()
play_context.remote_user = 'apo'
action_base._low_level_execute_command('ECHO', sudoable=True)
action_base._low_level_execute_command('ECHO', sudoable=True, executable='/bin/csh')
play_context.make_become_cmd.assert_called_once_with("ECHO", executable='/bin/sh')
play_context.make_become_cmd.assert_called_once_with("ECHO", executable='/bin/csh')
play_context.make_become_cmd.reset_mock()
@ -388,6 +627,6 @@ class TestActionBase(unittest.TestCase):
try:
play_context.remote_user = 'root'
action_base._low_level_execute_command('ECHO SAME', sudoable=True)
play_context.make_become_cmd.assert_called_once_with("ECHO SAME", executable='/bin/sh')
play_context.make_become_cmd.assert_called_once_with("ECHO SAME", executable=None)
finally:
C.BECOME_ALLOW_SAME_USER = become_allow_same_user

@ -0,0 +1,82 @@
# (c) 2012-2014, Chris Meyers <chris.meyers.fsu@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from six import PY3
from copy import deepcopy
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, mock_open
from ansible.plugins.callback import CallbackBase
import ansible.plugins.callback as callish
class TestCopyResultExclude(unittest.TestCase):
def setUp(self):
class DummyClass():
def __init__(self):
self.bar = [ 1, 2, 3 ]
self.a = {
"b": 2,
"c": 3,
}
self.b = {
"c": 3,
"d": 4,
}
self.foo = DummyClass()
self.cb = CallbackBase()
def tearDown(self):
pass
def test_copy_logic(self):
res = self.cb._copy_result_exclude(self.foo, ())
self.assertEqual(self.foo.bar, res.bar)
def test_copy_deep(self):
res = self.cb._copy_result_exclude(self.foo, ())
self.assertNotEqual(id(self.foo.bar), id(res.bar))
def test_no_exclude(self):
res = self.cb._copy_result_exclude(self.foo, ())
self.assertEqual(self.foo.bar, res.bar)
self.assertEqual(self.foo.a, res.a)
self.assertEqual(self.foo.b, res.b)
def test_exclude(self):
res = self.cb._copy_result_exclude(self.foo, ['bar', 'b'])
self.assertIsNone(res.bar)
self.assertIsNone(res.b)
self.assertEqual(self.foo.a, res.a)
def test_result_unmodified(self):
bar_id = id(self.foo.bar)
a_id = id(self.foo.a)
res = self.cb._copy_result_exclude(self.foo, ['bar', 'a'])
self.assertEqual(self.foo.bar, [ 1, 2, 3 ])
self.assertEqual(bar_id, id(self.foo.bar))
self.assertEqual(self.foo.a, dict(b=2, c=3))
self.assertEqual(a_id, id(self.foo.a))
self.assertRaises(AttributeError, self.cb._copy_result_exclude, self.foo, ['a', 'c', 'bar'])
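What these assertions pin down, roughly: _copy_result_exclude() must hand back a deep copy with the excluded attributes blanked, leave the original object untouched, and raise AttributeError when asked to exclude an attribute that does not exist. A hypothetical equivalent (not the CallbackBase code itself):

from copy import deepcopy

def copy_result_exclude(result, exclude):
    copied = deepcopy(result)
    for attr in exclude:
        getattr(copied, attr)       # unknown attribute -> AttributeError
        setattr(copied, attr, None)
    return copied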

@ -19,7 +19,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from six import StringIO
from io import StringIO
from ansible.compat.tests import unittest
from ansible.playbook.play_context import PlayContext

@ -0,0 +1,368 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pipes
import sys
from io import StringIO
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock, mock_open
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.playbook.play_context import PlayContext
from ansible.plugins.connection import ssh
from ansible.utils.unicode import to_bytes, to_unicode
class TestConnectionBaseClass(unittest.TestCase):
def test_plugins_connection_ssh_basic(self):
pc = PlayContext()
new_stdin = StringIO()
conn = ssh.Connection(pc, new_stdin)
# connect just returns self, so assert that
res = conn._connect()
self.assertEqual(conn, res)
ssh.SSHPASS_AVAILABLE = False
self.assertFalse(conn._sshpass_available())
ssh.SSHPASS_AVAILABLE = True
self.assertTrue(conn._sshpass_available())
with patch('subprocess.Popen') as p:
ssh.SSHPASS_AVAILABLE = None
p.return_value = MagicMock()
self.assertTrue(conn._sshpass_available())
ssh.SSHPASS_AVAILABLE = None
p.return_value = None
p.side_effect = OSError()
self.assertFalse(conn._sshpass_available())
conn.close()
self.assertFalse(conn._connected)
def test_plugins_connection_ssh__build_command(self):
pc = PlayContext()
new_stdin = StringIO()
conn = ssh.Connection(pc, new_stdin)
conn._build_command('ssh')
def test_plugins_connection_ssh__exec_command(self):
pc = PlayContext()
new_stdin = StringIO()
conn = ssh.Connection(pc, new_stdin)
conn._build_command = MagicMock()
conn._build_command.return_value = 'ssh something something'
conn._run = MagicMock()
conn._run.return_value = (0, 'stdout', 'stderr')
res, stdout, stderr = conn._exec_command('ssh')
res, stdout, stderr = conn._exec_command('ssh', 'this is some data')
@patch('select.select')
@patch('fcntl.fcntl')
@patch('os.write')
@patch('os.close')
@patch('pty.openpty')
@patch('subprocess.Popen')
def test_plugins_connection_ssh__run(self, mock_Popen, mock_openpty, mock_osclose, mock_oswrite, mock_fcntl, mock_select):
pc = PlayContext()
new_stdin = StringIO()
conn = ssh.Connection(pc, new_stdin)
conn._send_initial_data = MagicMock()
conn._examine_output = MagicMock()
conn._terminate_process = MagicMock()
conn.sshpass_pipe = [MagicMock(), MagicMock()]
mock_popen_res = MagicMock()
mock_popen_res.poll = MagicMock()
mock_popen_res.wait = MagicMock()
mock_popen_res.stdin = MagicMock()
mock_popen_res.stdin.fileno.return_value = 1000
mock_popen_res.stdout = MagicMock()
mock_popen_res.stdout.fileno.return_value = 1001
mock_popen_res.stderr = MagicMock()
mock_popen_res.stderr.fileno.return_value = 1002
mock_popen_res.return_code = 0
mock_Popen.return_value = mock_popen_res
def _mock_select(rlist, wlist, elist, timeout=None):
rvals = []
if mock_popen_res.stdin in rlist:
rvals.append(mock_popen_res.stdin)
if mock_popen_res.stderr in rlist:
rvals.append(mock_popen_res.stderr)
return (rvals, [], [])
mock_select.side_effect = _mock_select
mock_popen_res.stdout.read.side_effect = ["some data", ""]
mock_popen_res.stderr.read.side_effect = [""]
conn._run("ssh", "this is input data")
# test with a password set to trigger the sshpass write
pc.password = '12345'
mock_popen_res.stdout.read.side_effect = ["some data", "", ""]
mock_popen_res.stderr.read.side_effect = [""]
conn._run(["ssh", "is", "a", "cmd"], "this is more data")
# test with password prompting enabled
pc.password = None
pc.prompt = True
mock_popen_res.stdout.read.side_effect = ["some data", "", ""]
mock_popen_res.stderr.read.side_effect = [""]
conn._run("ssh", "this is input data")
# test with some become settings
pc.prompt = False
pc.become = True
pc.success_key = 'BECOME-SUCCESS-abcdefg'
mock_popen_res.stdout.read.side_effect = ["some data", "", ""]
mock_popen_res.stderr.read.side_effect = [""]
conn._run("ssh", "this is input data")
# simulate no data input
mock_openpty.return_value = (98, 99)
mock_popen_res.stdout.read.side_effect = ["some data", "", ""]
mock_popen_res.stderr.read.side_effect = [""]
conn._run("ssh", "")
# simulate no data input but Popen using new pty's fails
mock_Popen.return_value = None
mock_Popen.side_effect = [OSError(), mock_popen_res]
mock_popen_res.stdout.read.side_effect = ["some data", "", ""]
mock_popen_res.stderr.read.side_effect = [""]
conn._run("ssh", "")
def test_plugins_connection_ssh__examine_output(self):
pc = PlayContext()
new_stdin = StringIO()
conn = ssh.Connection(pc, new_stdin)
conn.check_password_prompt = MagicMock()
conn.check_become_success = MagicMock()
conn.check_incorrect_password = MagicMock()
conn.check_missing_password = MagicMock()
def _check_password_prompt(line):
if 'foo' in line:
return True
return False
def _check_become_success(line):
if 'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz' in line:
return True
return False
def _check_incorrect_password(line):
if 'incorrect password' in line:
return True
return False
def _check_missing_password(line):
if 'bad password' in line:
return True
return False
conn.check_password_prompt.side_effect = _check_password_prompt
conn.check_become_success.side_effect = _check_become_success
conn.check_incorrect_password.side_effect = _check_incorrect_password
conn.check_missing_password.side_effect = _check_missing_password
# test examining output for prompt
conn._flags = dict(
become_prompt = False,
become_success = False,
become_error = False,
become_nopasswd_error = False,
)
pc.prompt = True
output, unprocessed = conn._examine_output('source', 'state', 'line 1\nline 2\nfoo\nline 3\nthis should be the remainder', False)
self.assertEqual(output, 'line 1\nline 2\nline 3\n')
self.assertEqual(unprocessed, 'this should be the remainder')
self.assertTrue(conn._flags['become_prompt'])
self.assertFalse(conn._flags['become_success'])
self.assertFalse(conn._flags['become_error'])
self.assertFalse(conn._flags['become_nopasswd_error'])
# test examining output for become prompt
conn._flags = dict(
become_prompt = False,
become_success = False,
become_error = False,
become_nopasswd_error = False,
)
pc.prompt = False
pc.success_key = 'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz'
output, unprocessed = conn._examine_output('source', 'state', 'line 1\nline 2\nBECOME-SUCCESS-abcdefghijklmnopqrstuvxyz\nline 3\n', False)
self.assertEqual(output, 'line 1\nline 2\nline 3\n')
self.assertEqual(unprocessed, '')
self.assertFalse(conn._flags['become_prompt'])
self.assertTrue(conn._flags['become_success'])
self.assertFalse(conn._flags['become_error'])
self.assertFalse(conn._flags['become_nopasswd_error'])
# test examining output for become failure
conn._flags = dict(
become_prompt = False,
become_success = False,
become_error = False,
become_nopasswd_error = False,
)
pc.prompt = False
pc.success_key = None
output, unprocessed = conn._examine_output('source', 'state', 'line 1\nline 2\nincorrect password\n', True)
self.assertEqual(output, 'line 1\nline 2\nincorrect password\n')
self.assertEqual(unprocessed, '')
self.assertFalse(conn._flags['become_prompt'])
self.assertFalse(conn._flags['become_success'])
self.assertTrue(conn._flags['become_error'])
self.assertFalse(conn._flags['become_nopasswd_error'])
# test examining output for missing password
conn._flags = dict(
become_prompt = False,
become_success = False,
become_error = False,
become_nopasswd_error = False,
)
pc.prompt = False
pc.success_key = None
output, unprocessed = conn._examine_output('source', 'state', 'line 1\nbad password\n', True)
self.assertEqual(output, 'line 1\nbad password\n')
self.assertEqual(unprocessed, '')
self.assertFalse(conn._flags['become_prompt'])
self.assertFalse(conn._flags['become_success'])
self.assertFalse(conn._flags['become_error'])
self.assertTrue(conn._flags['become_nopasswd_error'])
@patch('time.sleep')
def test_plugins_connection_ssh_exec_command(self, mock_sleep):
pc = PlayContext()
new_stdin = StringIO()
conn = ssh.Connection(pc, new_stdin)
conn._build_command = MagicMock()
conn._exec_command = MagicMock()
C.ANSIBLE_SSH_RETRIES = 9
# test a regular, successful execution
conn._exec_command.return_value = (0, 'stdout', '')
res = conn.exec_command('ssh', 'some data')
# test a retry, followed by success
conn._exec_command.return_value = None
conn._exec_command.side_effect = [(255, '', ''), (0, 'stdout', '')]
res = conn.exec_command('ssh', 'some data')
# test multiple failures
conn._exec_command.side_effect = [(255, '', '')]*10
self.assertRaises(AnsibleConnectionFailure, conn.exec_command, 'ssh', 'some data')
# test other failure from exec_command
conn._exec_command.side_effect = [Exception('bad')]*10
self.assertRaises(Exception, conn.exec_command, 'ssh', 'some data')
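The retry assertions above hinge on ssh's convention that rc 255 means the transport itself failed: such a result is retried up to C.ANSIBLE_SSH_RETRIES times before AnsibleConnectionFailure is raised, while any other exception propagates immediately. A minimal sketch of that loop (hypothetical run_once callable; not the plugin's actual exec_command):

from ansible.errors import AnsibleConnectionFailure

def exec_with_retries(run_once, retries):
    for attempt in range(retries + 1):
        returncode, stdout, stderr = run_once()
        if returncode != 255:
            return returncode, stdout, stderr
    raise AnsibleConnectionFailure("ssh failed after %d attempts" % (retries + 1))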
@patch('os.path.exists')
def test_plugins_connection_ssh_put_file(self, mock_ospe):
pc = PlayContext()
new_stdin = StringIO()
conn = ssh.Connection(pc, new_stdin)
conn._build_command = MagicMock()
conn._run = MagicMock()
mock_ospe.return_value = True
conn._build_command.return_value = 'some command to run'
conn._run.return_value = (0, '', '')
conn.host = "some_host"
# test with C.DEFAULT_SCP_IF_SSH enabled
C.DEFAULT_SCP_IF_SSH = True
res = conn.put_file('/path/to/in/file', '/path/to/dest/file')
conn._run.assert_called_with('some command to run', None)
res = conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
conn._run.assert_called_with('some command to run', None)
# test with C.DEFAULT_SCP_IF_SSH disabled
C.DEFAULT_SCP_IF_SSH = False
expected_in_data = b' '.join((b'put', to_bytes(pipes.quote('/path/to/in/file')), to_bytes(pipes.quote('/path/to/dest/file')))) + b'\n'
res = conn.put_file('/path/to/in/file', '/path/to/dest/file')
conn._run.assert_called_with('some command to run', expected_in_data)
expected_in_data = b' '.join((b'put', to_bytes(pipes.quote('/path/to/in/file/with/unicode-fö〩')), to_bytes(pipes.quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
res = conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
conn._run.assert_called_with('some command to run', expected_in_data)
# test that a non-zero rc raises an error
conn._run.return_value = (1, 'stdout', 'some errors')
self.assertRaises(AnsibleError, conn.put_file, '/path/to/bad/file', '/remote/path/to/file')
# test that a not-found path raises an error
mock_ospe.return_value = False
conn._run.return_value = (0, 'stdout', '')
self.assertRaises(AnsibleFileNotFound, conn.put_file, '/path/to/bad/file', '/remote/path/to/file')
def test_plugins_connection_ssh_fetch_file(self):
pc = PlayContext()
new_stdin = StringIO()
conn = ssh.Connection(pc, new_stdin)
conn._build_command = MagicMock()
conn._run = MagicMock()
conn._build_command.return_value = 'some command to run'
conn._run.return_value = (0, '', '')
conn.host = "some_host"
# test with C.DEFAULT_SCP_IF_SSH enabled
C.DEFAULT_SCP_IF_SSH = True
res = conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
conn._run.assert_called_with('some command to run', None)
res = conn.fetch_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
conn._run.assert_called_with('some command to run', None)
# test with C.DEFAULT_SCP_IF_SSH disabled
C.DEFAULT_SCP_IF_SSH = False
expected_in_data = b' '.join((b'get', to_bytes(pipes.quote('/path/to/in/file')), to_bytes(pipes.quote('/path/to/dest/file')))) + b'\n'
res = conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
conn._run.assert_called_with('some command to run', expected_in_data)
expected_in_data = b' '.join((b'get', to_bytes(pipes.quote('/path/to/in/file/with/unicode-fö〩')), to_bytes(pipes.quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
res = conn.fetch_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
conn._run.assert_called_with('some command to run', expected_in_data)
# test that a non-zero rc raises an error
conn._run.return_value = (1, 'stdout', 'some errors')
self.assertRaises(AnsibleError, conn.fetch_file, '/path/to/bad/file', '/remote/path/to/file')

@ -76,6 +76,7 @@ class TestStrategyBase(unittest.TestCase):
for i in range(0, 5):
mock_host = MagicMock()
mock_host.name = "host%02d" % (i+1)
mock_host.has_hostkey = True
mock_hosts.append(mock_host)
mock_inventory = MagicMock()
@ -111,6 +112,7 @@ class TestStrategyBase(unittest.TestCase):
fake_loader = DictDataLoader()
mock_var_manager = MagicMock()
mock_host = MagicMock()
mock_host.has_hostkey = True
mock_inventory = MagicMock()
mock_options = MagicMock()
mock_options.module_path = None
@ -171,6 +173,7 @@ class TestStrategyBase(unittest.TestCase):
mock_host = MagicMock()
mock_host.name = 'test01'
mock_host.vars = dict()
mock_host.has_hostkey = True
mock_task = MagicMock()
mock_task._role = None
@ -348,6 +351,7 @@ class TestStrategyBase(unittest.TestCase):
mock_host = MagicMock(Host)
mock_host.name = "test01"
mock_host.has_hostkey = True
mock_inventory = MagicMock()
mock_inventory.get_hosts.return_value = [mock_host]

@ -67,7 +67,7 @@
region: 'us-east-1'
instance_ids: "{{ hostvars[item]['ec2_instance_ids'] }}"
when: hostvars[item]['ec2_instance_ids'] is defined and item == inventory_hostname
with_items: groups['dynamic_hosts']
with_items: "{{groups['dynamic_hosts']}}"
- set_fact:
ansible_connection: local

@ -0,0 +1,45 @@
# Latest version of centos
FROM centos:centos6
RUN yum -y update; yum clean all;
RUN yum -y install \
epel-release \
file \
gcc \
git \
make \
mercurial \
rubygems \
sed \
subversion \
sudo \
unzip \
openssh-clients \
openssh-server \
which
RUN yum -y install \
PyYAML \
python-coverage \
python-devel \
python-httplib2 \
python-jinja2 \
python-keyczar \
python-mock \
python-nose \
python-paramiko \
python-pip \
python-setuptools \
python-virtualenv
RUN pip install --upgrade jinja2
RUN rpm -e --nodeps python-crypto; pip install pycrypto
RUN /bin/sed -i -e 's/^\(Defaults\s*requiretty\)/#--- \1/' /etc/sudoers
RUN mkdir /etc/ansible/
RUN /bin/echo -e '[local]\nlocalhost ansible_connection=local' > /etc/ansible/hosts
#VOLUME /sys/fs/cgroup /run /tmp
RUN ssh-keygen -q -t rsa1 -N '' -f /etc/ssh/ssh_host_key && \
ssh-keygen -q -t dsa -N '' -f /etc/ssh/ssh_host_dsa_key && \
ssh-keygen -q -t rsa -N '' -f /etc/ssh/ssh_host_rsa_key && \
ssh-keygen -q -t rsa -N '' -f /root/.ssh/id_rsa && \
cp /root/.ssh/id_rsa.pub /root/.ssh/authorized_keys && \
for key in /etc/ssh/ssh_host_*_key.pub; do echo "localhost $(cat ${key})" >> /root/.ssh/known_hosts; done
ENV container=docker
CMD ["/sbin/init"]

@ -0,0 +1,50 @@
# Latest version of centos
FROM centos:centos7
RUN yum -y update; yum clean all; yum -y swap fakesystemd systemd
RUN (cd /lib/systemd/system/sysinit.target.wants/; for i in *; do [ $i == systemd-tmpfiles-setup.service ] || rm -f $i; done); \
rm -f /lib/systemd/system/multi-user.target.wants/*; \
rm -f /etc/systemd/system/*.wants/*; \
rm -f /lib/systemd/system/local-fs.target.wants/*; \
rm -f /lib/systemd/system/sockets.target.wants/*udev*; \
rm -f /lib/systemd/system/sockets.target.wants/*initctl*; \
rm -f /lib/systemd/system/basic.target.wants/*; \
rm -f /lib/systemd/system/anaconda.target.wants/*;
RUN yum -y install \
dbus-python \
epel-release \
file \
git \
iproute \
make \
mercurial \
rubygems \
subversion \
sudo \
unzip \
openssh-clients \
openssh-server \
which
RUN yum -y install \
PyYAML \
python-coverage \
python-httplib2 \
python-jinja2 \
python-keyczar \
python-mock \
python-nose \
python-paramiko \
python-pip \
python-setuptools \
python-virtualenv
RUN /usr/bin/sed -i -e 's/^\(Defaults\s*requiretty\)/#--- \1/' /etc/sudoers
RUN mkdir /etc/ansible/
RUN /usr/bin/echo -e '[local]\nlocalhost ansible_connection=local' > /etc/ansible/hosts
VOLUME /sys/fs/cgroup /run /tmp
RUN ssh-keygen -q -t rsa1 -N '' -f /etc/ssh/ssh_host_key && \
ssh-keygen -q -t dsa -N '' -f /etc/ssh/ssh_host_dsa_key && \
ssh-keygen -q -t rsa -N '' -f /etc/ssh/ssh_host_rsa_key && \
ssh-keygen -q -t rsa -N '' -f /root/.ssh/id_rsa && \
cp /root/.ssh/id_rsa.pub /root/.ssh/authorized_keys && \
for key in /etc/ssh/ssh_host_*_key.pub; do echo "localhost $(cat ${key})" >> /root/.ssh/known_hosts; done
ENV container=docker
CMD ["/usr/sbin/init"]

@ -0,0 +1,54 @@
# Latest version of fedora rawhide
FROM fedora:rawhide
RUN dnf -y update; dnf clean all
RUN (cd /lib/systemd/system/sysinit.target.wants/; for i in *; do [ $i == systemd-tmpfiles-setup.service ] || rm -f $i; done); \
rm -f /lib/systemd/system/multi-user.target.wants/*; \
rm -f /etc/systemd/system/*.wants/*; \
rm -f /lib/systemd/system/local-fs.target.wants/*; \
rm -f /lib/systemd/system/sockets.target.wants/*udev*; \
rm -f /lib/systemd/system/sockets.target.wants/*initctl*; \
rm -f /lib/systemd/system/basic.target.wants/*; \
rm -f /lib/systemd/system/anaconda.target.wants/*;
RUN dnf -y install \
dbus-python \
file \
findutils \
git \
glibc-locale-source \
iproute \
make \
mercurial \
procps \
PyYAML \
python-coverage \
python2-dnf \
python-httplib2 \
python-jinja2 \
python-keyczar \
python-mock \
python-nose \
python-paramiko \
python-pip \
python-setuptools \
python-virtualenv \
rubygems \
subversion \
sudo \
tar \
unzip \
which \
openssh-clients \
openssh-server \
yum
RUN localedef --quiet -c -i en_US -f UTF-8 en_US.UTF-8
RUN /usr/bin/sed -i -e 's/^\(Defaults\s*requiretty\)/#--- \1/' /etc/sudoers
RUN mkdir /etc/ansible/
RUN /usr/bin/echo -e '[local]\nlocalhost ansible_connection=local' > /etc/ansible/hosts
VOLUME /sys/fs/cgroup /run /tmp
RUN ssh-keygen -q -t dsa -N '' -f /etc/ssh/ssh_host_dsa_key && \
ssh-keygen -q -t rsa -N '' -f /etc/ssh/ssh_host_rsa_key && \
ssh-keygen -q -t rsa -N '' -f /root/.ssh/id_rsa && \
cp /root/.ssh/id_rsa.pub /root/.ssh/authorized_keys && \
for key in /etc/ssh/ssh_host_*_key.pub; do echo "localhost $(cat ${key})" >> /root/.ssh/known_hosts; done
ENV container=docker
CMD ["/usr/sbin/init"]

@ -0,0 +1,55 @@
# Fedora 23
FROM fedora:23
RUN dnf -y update; dnf clean all
RUN (cd /lib/systemd/system/sysinit.target.wants/; for i in *; do [ $i == systemd-tmpfiles-setup.service ] || rm -f $i; done); \
rm -f /lib/systemd/system/multi-user.target.wants/*; \
rm -f /etc/systemd/system/*.wants/*; \
rm -f /lib/systemd/system/local-fs.target.wants/*; \
rm -f /lib/systemd/system/sockets.target.wants/*udev*; \
rm -f /lib/systemd/system/sockets.target.wants/*initctl*; \
rm -f /lib/systemd/system/basic.target.wants/*; \
rm -f /lib/systemd/system/anaconda.target.wants/*;
RUN dnf -y install \
dbus-python \
file \
findutils \
glibc-common \
git \
iproute \
make \
mercurial \
procps \
PyYAML \
python-coverage \
python2-dnf \
python-httplib2 \
python-jinja2 \
python-keyczar \
python-mock \
python-nose \
python-paramiko \
python-pip \
python-setuptools \
python-virtualenv \
rubygems \
subversion \
sudo \
tar \
unzip \
which \
openssh-clients \
openssh-server \
yum
RUN localedef --quiet -f ISO-8859-1 -i pt_BR pt_BR
RUN localedef --quiet -f ISO-8859-1 -i es_MX es_MX
RUN /usr/bin/sed -i -e 's/^\(Defaults\s*requiretty\)/#--- \1/' /etc/sudoers
RUN mkdir /etc/ansible/
RUN /usr/bin/echo -e '[local]\nlocalhost ansible_connection=local' > /etc/ansible/hosts
VOLUME /sys/fs/cgroup /run /tmp
RUN ssh-keygen -q -t dsa -N '' -f /etc/ssh/ssh_host_dsa_key && \
ssh-keygen -q -t rsa -N '' -f /etc/ssh/ssh_host_rsa_key && \
ssh-keygen -q -t rsa -N '' -f /root/.ssh/id_rsa && \
cp /root/.ssh/id_rsa.pub /root/.ssh/authorized_keys && \
for key in /etc/ssh/ssh_host_*_key.pub; do echo "localhost $(cat ${key})" >> /root/.ssh/known_hosts; done
ENV container=docker
CMD ["/usr/sbin/init"]

@ -0,0 +1,69 @@
FROM ubuntu:precise
RUN apt-get clean; apt-get update -y;
RUN apt-get install -y \
debianutils \
gawk \
git \
locales \
make \
mercurial \
ruby \
rubygems \
subversion \
sudo \
openssh-client \
openssh-server \
unzip
# helpful things taken from the ubuntu-upstart Dockerfile:
# https://github.com/tianon/dockerfiles/blob/4d24a12b54b75b3e0904d8a285900d88d3326361/sbin-init/ubuntu/upstart/14.04/Dockerfile
ADD init-fake.conf /etc/init/fake-container-events.conf
# undo some leet hax of the base image
RUN rm /usr/sbin/policy-rc.d; \
rm /sbin/initctl; dpkg-divert --rename --remove /sbin/initctl
# remove some pointless services
RUN /usr/sbin/update-rc.d -f ondemand remove; \
for f in \
/etc/init/u*.conf \
/etc/init/mounted-dev.conf \
/etc/init/mounted-proc.conf \
/etc/init/mounted-run.conf \
/etc/init/mounted-tmp.conf \
/etc/init/mounted-var.conf \
/etc/init/hostname.conf \
/etc/init/networking.conf \
/etc/init/tty*.conf \
/etc/init/plymouth*.conf \
/etc/init/hwclock*.conf \
/etc/init/module*.conf\
; do \
dpkg-divert --local --rename --add "$f"; \
done; \
echo '# /lib/init/fstab: cleared out for bare-bones Docker' > /lib/init/fstab
# end things from ubuntu-upstart Dockerfile
RUN apt-get install -y \
python-coverage \
python-dev \
python-httplib2 \
python-jinja2 \
python-keyczar \
python-mock \
python-nose \
python-paramiko \
python-pip \
python-setuptools \
python-virtualenv \
python-yaml
RUN pip install --upgrade jinja2 pycrypto
RUN rm /etc/apt/apt.conf.d/docker-clean
RUN /bin/sed -i -e 's/^\(Defaults\s*requiretty\)/#--- \1/' /etc/sudoers
RUN mkdir /etc/ansible/
RUN /bin/echo -e "[local]\nlocalhost ansible_connection=local" > /etc/ansible/hosts
RUN locale-gen en_US.UTF-8
RUN ssh-keygen -q -t rsa -N '' -f /root/.ssh/id_rsa && \
cp /root/.ssh/id_rsa.pub /root/.ssh/authorized_keys && \
for key in /etc/ssh/ssh_host_*_key.pub; do echo "localhost $(cat ${key})" >> /root/.ssh/known_hosts; done
ENV container docker
CMD ["/sbin/init"]

@ -0,0 +1,13 @@
# fake some events needed for correct startup of other services
description "In-Container Upstart Fake Events"
start on startup
script
rm -rf /var/run/*.pid
rm -rf /var/run/network/*
/sbin/initctl emit stopped JOB=udevtrigger --no-wait
/sbin/initctl emit started JOB=udev --no-wait
/sbin/initctl emit runlevel RUNLEVEL=3 --no-wait
end script

@ -0,0 +1,66 @@
FROM ubuntu:trusty
RUN apt-get clean; apt-get update -y;
RUN apt-get install -y \
debianutils \
gawk \
git \
locales \
make \
mercurial \
ruby \
subversion \
sudo \
openssh-client \
openssh-server \
unzip
# helpful things taken from the ubuntu-upstart Dockerfile:
# https://github.com/tianon/dockerfiles/blob/4d24a12b54b75b3e0904d8a285900d88d3326361/sbin-init/ubuntu/upstart/14.04/Dockerfile
ADD init-fake.conf /etc/init/fake-container-events.conf
# undo some leet hax of the base image
RUN rm /usr/sbin/policy-rc.d; \
rm /sbin/initctl; dpkg-divert --rename --remove /sbin/initctl
# remove some pointless services
RUN /usr/sbin/update-rc.d -f ondemand remove; \
for f in \
/etc/init/u*.conf \
/etc/init/mounted-dev.conf \
/etc/init/mounted-proc.conf \
/etc/init/mounted-run.conf \
/etc/init/mounted-tmp.conf \
/etc/init/mounted-var.conf \
/etc/init/hostname.conf \
/etc/init/networking.conf \
/etc/init/tty*.conf \
/etc/init/plymouth*.conf \
/etc/init/hwclock*.conf \
/etc/init/module*.conf\
; do \
dpkg-divert --local --rename --add "$f"; \
done; \
echo '# /lib/init/fstab: cleared out for bare-bones Docker' > /lib/init/fstab
# end things from ubuntu-upstart Dockerfile
RUN apt-get install -y \
python-coverage \
python-httplib2 \
python-jinja2 \
python-keyczar \
python-mock \
python-nose \
python-paramiko \
python-pip \
python-setuptools \
python-virtualenv \
python-yaml
RUN rm /etc/apt/apt.conf.d/docker-clean
RUN /bin/sed -i -e 's/^\(Defaults\s*requiretty\)/#--- \1/' /etc/sudoers
RUN mkdir /etc/ansible/
RUN /bin/echo -e "[local]\nlocalhost ansible_connection=local" > /etc/ansible/hosts
RUN locale-gen en_US.UTF-8
RUN ssh-keygen -q -t rsa -N '' -f /root/.ssh/id_rsa && \
cp /root/.ssh/id_rsa.pub /root/.ssh/authorized_keys && \
for key in /etc/ssh/ssh_host_*_key.pub; do echo "localhost $(cat ${key})" >> /root/.ssh/known_hosts; done
ENV container docker
CMD ["/sbin/init"]

@ -0,0 +1,13 @@
# fake some events needed for correct startup of other services
description "In-Container Upstart Fake Events"
start on startup
script
rm -rf /var/run/*.pid
rm -rf /var/run/network/*
/sbin/initctl emit stopped JOB=udevtrigger --no-wait
/sbin/initctl emit started JOB=udev --no-wait
/sbin/initctl emit runlevel RUNLEVEL=3 --no-wait
end script

@ -0,0 +1,20 @@
#!/bin/sh -x
if [ "${TARGET}" = "sanity" ]; then
./test/code-smell/replace-urlopen.sh .
./test/code-smell/use-compat-six.sh lib
./test/code-smell/boilerplate.sh
./test/code-smell/required-and-default-attributes.sh
if test x"$TOXENV" != x'py24' ; then tox ; fi
if test x"$TOXENV" = x'py24' ; then python2.4 -V && python2.4 -m compileall -fq -x 'module_utils/(a10|rax|openstack|ec2|gce).py' lib/ansible/module_utils ; fi
else
set -e
export C_NAME="testAbull_$$_$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 8 | head -n 1)"
docker run -d --volume="${PWD}:/root/ansible:Z" --name "${C_NAME}" ${TARGET_OPTIONS} ansible/ansible:${TARGET} > /tmp/cid_${TARGET}
docker exec -ti $(cat /tmp/cid_${TARGET}) /bin/sh -c "export TEST_FLAGS='${TEST_FLAGS}'; cd /root/ansible; . hacking/env-setup; (cd test/integration; LC_ALL=en_US.utf-8 make)"
docker kill $(cat /tmp/cid_${TARGET})
if [ "X${TESTS_KEEP_CONTAINER}" = "X" ]; then
docker rm "${C_NAME}"
fi
fi

@ -0,0 +1,13 @@
#
# Test requirements
#
nose
mock >= 1.0.1, < 1.1
passlib
coverage
coveralls
unittest2
redis
python3-memcached
python-systemd

@ -0,0 +1,13 @@
#
# Test requirements
#
nose
mock >= 1.0.1, < 1.1
passlib
coverage
coveralls
unittest2
redis
python-memcached
python-systemd