Overhaul ansible-test cloud test plugins. (#53044)

Author: Matt Clay (committed by GitHub)
parent 9c644d9bcc
commit eeaff731de

@@ -11,11 +11,12 @@
 #
 # NOTE: Automatic provisioning of AWS credentials requires an ansible-core-ci API key.
+[default]
 aws_access_key: @ACCESS_KEY
 aws_secret_key: @SECRET_KEY
 security_token: @SECURITY_TOKEN
 aws_region: @REGION
 # aliases for backwards compatibility with older integration test playbooks
-ec2_access_key: '{{ aws_access_key }}'
-ec2_secret_key: '{{ aws_secret_key }}'
-ec2_region: '{{ aws_region }}'
+ec2_access_key: {{ aws_access_key }}
+ec2_secret_key: {{ aws_secret_key }}
+ec2_region: {{ aws_region }}

@@ -13,6 +13,7 @@
 # 1) ansible-core-ci API key in ~/.ansible-core-ci.key
 # 2) Sherlock URL (including API key) in ~/.ansible-sherlock-ci.cfg
+[default]
 # Provide either Service Principal or Active Directory credentials below.
 # Service Principal

@@ -4,5 +4,6 @@
 #
 # 1) Running integration tests without using ansible-test.
 #
 [default]
 cloudscale_api_token = @API_TOKEN

@@ -11,7 +11,7 @@
 #
 # It is recommended that you DO NOT use this template unless you cannot use the simulator.
-[cloudstack]
+[default]
 endpoint = http://@HOST:@PORT/client/api
 key = @KEY
 secret = @SECRET

@@ -11,6 +11,7 @@
 #
 # It is recommended that you DO NOT use this template unless you cannot use the simulator.
+[default]
 gcp_project: @PROJECT
 gcp_cred_file: @CRED_FILE
 gcp_cred_kind: @CRED_KIND

@@ -11,6 +11,7 @@
 #
 # It is recommended that you DO NOT use this template unless you cannot use the simulator.
+[default]
 gitlab_host: http://@HOST:@PORT
 gitlab_login_token: @TOKEN
 gitlab_runner_registration_token: @RUNNER_TOKEN

@@ -12,7 +12,7 @@
 # If you run with @FIXTURES enabled (true) then you can decide if you want to
 # run in @REPLAY mode (true) or, record mode (false).
+[default]
 opennebula_url: @URL
 opennebula_username: @USERNAME
 opennebula_password: @PASSWORD

@@ -11,7 +11,7 @@
 #
 # NOTE: Automatic provisioning of Tower credentials requires an ansible-core-ci API key.
-[general]
+[default]
 version=@VERSION
 host=@HOST
 username=@USERNAME
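
All of the config templates above now carry their settings under a single [default] section, so every provider plugin can read them the same way with ConfigParser. A minimal sketch of that pattern, assuming a filled-in copy of one of the templates saved under a made-up filename:

    # A minimal sketch, not part of this commit; the filename is hypothetical.
    try:
        from configparser import ConfigParser          # Python 3
    except ImportError:
        from ConfigParser import SafeConfigParser as ConfigParser  # Python 2

    parser = ConfigParser()
    parser.read('cloud-config-example.ini')  # a filled-in copy of one of the templates above

    # Every key under [default] is available the same way, regardless of provider.
    settings = dict(parser.items('default'))
    print(settings)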

@@ -6,8 +6,8 @@
 dns_prefix: "aks{{ resource_group | hash('md5') | truncate(10, True, '') }}"
 kubernetes_version: 1.7.7
 service_principal:
-client_id: "{{ lookup('env', 'AZURE_CLIENT_ID') }}"
-client_secret: "{{ lookup('env', 'AZURE_SECRET') }}"
+client_id: "{{ azure_client_id }}"
+client_secret: "{{ azure_secret }}"
 linux_profile:
 admin_username: azureuser
 ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDSPmiqkvDH1/+MDAVDZT8381aYqp73Odz8cnD5hegNhqtXajqtiH0umVg7HybX3wt1HjcrwKJovZURcIbbcDvzdH2bnYbF93T4OLXA0bIfuIp6M86x1iutFtXdpN3TTicINrmSXEE2Ydm51iMu77B08ZERjVaToya2F7vC+egfoPvibf7OLxE336a5tPCywavvNihQjL8sjgpDT5AAScjb3YqK/6VLeQ18Ggt8/ufINsYkb+9/Ji/3OcGFeflnDXq80vPUyF3u4iIylob6RSZenC38cXmQB05tRNxS1B6BXCjMRdy0v4pa7oKM2GA4ADKpNrr0RI9ed+peRFwmsclH test@ansible
@@ -36,8 +36,8 @@
 dns_prefix: "aks{{ resource_group | hash('md5') | truncate(10, True, '') }}"
 kubernetes_version: 1.7.7
 service_principal:
-client_id: "{{ lookup('env', 'AZURE_CLIENT_ID') }}"
-client_secret: "{{ lookup('env', 'AZURE_SECRET') }}"
+client_id: "{{ azure_client_id }}"
+client_secret: "{{ azure_secret }}"
 linux_profile:
 admin_username: azureuser
 ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDSPmiqkvDH1/+MDAVDZT8381aYqp73Odz8cnD5hegNhqtXajqtiH0umVg7HybX3wt1HjcrwKJovZURcIbbcDvzdH2bnYbF93T4OLXA0bIfuIp6M86x1iutFtXdpN3TTicINrmSXEE2Ydm51iMu77B08ZERjVaToya2F7vC+egfoPvibf7OLxE336a5tPCywavvNihQjL8sjgpDT5AAScjb3YqK/6VLeQ18Ggt8/ufINsYkb+9/Ji/3OcGFeflnDXq80vPUyF3u4iIylob6RSZenC38cXmQB05tRNxS1B6BXCjMRdy0v4pa7oKM2GA4ADKpNrr0RI9ed+peRFwmsclH test@ansible
@@ -74,7 +74,7 @@
 dns_prefix: "aks{{ resource_group | hash('md5') | truncate(10, True, '') }}"
 kubernetes_version: 1.7.7
 service_principal:
-client_id: "{{ lookup('env', 'AZURE_CLIENT_ID') }}"
+client_id: "{{ azure_client_id }}"
 linux_profile:
 admin_username: azureuser
 ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDSPmiqkvDH1/+MDAVDZT8381aYqp73Odz8cnD5hegNhqtXajqtiH0umVg7HybX3wt1HjcrwKJovZURcIbbcDvzdH2bnYbF93T4OLXA0bIfuIp6M86x1iutFtXdpN3TTicINrmSXEE2Ydm51iMu77B08ZERjVaToya2F7vC+egfoPvibf7OLxE336a5tPCywavvNihQjL8sjgpDT5AAScjb3YqK/6VLeQ18Ggt8/ufINsYkb+9/Ji/3OcGFeflnDXq80vPUyF3u4iIylob6RSZenC38cXmQB05tRNxS1B6BXCjMRdy0v4pa7oKM2GA4ADKpNrr0RI9ed+peRFwmsclH test@ansible
@@ -98,7 +98,7 @@
 dns_prefix: "aks{{ resource_group | hash('md5') | truncate(10, True, '') }}"
 kubernetes_version: 1.8.1
 service_principal:
-client_id: "{{ lookup('env', 'AZURE_CLIENT_ID') }}"
+client_id: "{{ azure_client_id }}"
 linux_profile:
 admin_username: azureuser
 ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDSPmiqkvDH1/+MDAVDZT8381aYqp73Odz8cnD5hegNhqtXajqtiH0umVg7HybX3wt1HjcrwKJovZURcIbbcDvzdH2bnYbF93T4OLXA0bIfuIp6M86x1iutFtXdpN3TTicINrmSXEE2Ydm51iMu77B08ZERjVaToya2F7vC+egfoPvibf7OLxE336a5tPCywavvNihQjL8sjgpDT5AAScjb3YqK/6VLeQ18Ggt8/ufINsYkb+9/Ji/3OcGFeflnDXq80vPUyF3u4iIylob6RSZenC38cXmQB05tRNxS1B6BXCjMRdy0v4pa7oKM2GA4ADKpNrr0RI9ed+peRFwmsclH test@ansible
@@ -122,7 +122,7 @@
 dns_prefix: "aks{{ resource_group | hash('md5') | truncate(10, True, '') }}"
 kubernetes_version: 1.8.1
 service_principal:
-client_id: "{{ lookup('env', 'AZURE_CLIENT_ID') }}"
+client_id: "{{ azure_client_id }}"
 linux_profile:
 admin_username: azureuser
 ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDSPmiqkvDH1/+MDAVDZT8381aYqp73Odz8cnD5hegNhqtXajqtiH0umVg7HybX3wt1HjcrwKJovZURcIbbcDvzdH2bnYbF93T4OLXA0bIfuIp6M86x1iutFtXdpN3TTicINrmSXEE2Ydm51iMu77B08ZERjVaToya2F7vC+egfoPvibf7OLxE336a5tPCywavvNihQjL8sjgpDT5AAScjb3YqK/6VLeQ18Ggt8/ufINsYkb+9/Ji/3OcGFeflnDXq80vPUyF3u4iIylob6RSZenC38cXmQB05tRNxS1B6BXCjMRdy0v4pa7oKM2GA4ADKpNrr0RI9ed+peRFwmsclH test@ansible

@@ -1,15 +1,9 @@
 - name: Prepare random number
 set_fact:
 rpfx: "{{ resource_group | hash('md5') | truncate(7, True, '') }}{{ 1000 | random }}"
-tenant_id: "{{ lookup('env','AZURE_TENANT') }}"
+tenant_id: "{{ azure_tenant }}"
 run_once: yes
-- name: set service principal info
-set_fact:
-azure_client_id: "{{ lookup('env','AZURE_CLIENT_ID') }}"
-azure_secret: "{{ lookup('env','AZURE_SECRET') }}"
-no_log: yes
 - name: lookup service principal object id
 set_fact:
 object_id: "{{ lookup('azure_service_principal_attribute',

@@ -4,7 +4,7 @@
 url: 'https://api.cloudscale.ch/v1/volumes'
 method: POST
 headers:
-Authorization: 'Bearer {{ lookup("env", "CLOUDSCALE_API_TOKEN") }}'
+Authorization: 'Bearer {{ cloudscale_api_token }}'
 body:
 name: '{{ cloudscale_resource_prefix }}-duplicate'
 size_gb: 50

@@ -1,7 +0,0 @@
-- name: create the provider credentials from the cloud script
-set_fact:
-nios_provider:
-host: "{{ lookup('env', 'NIOS_HOST') }}"
-username: admin
-password: infoblox
-- debug: var=nios_provider

@@ -2,8 +2,6 @@
 - name: Check that SSL is available
 tower_organization:
 name: Default
-environment:
-TOWER_HOST: "https://{{ lookup('env', 'TOWER_HOST') }}"
 register: result
 - name: Check we haven't changed anything
@@ -14,7 +12,6 @@
 tower_organization:
 name: Default
 environment:
-TOWER_HOST: "https://{{ lookup('env', 'TOWER_HOST') }}"
 TOWER_CERTIFICATE: /dev/null # force check failure
 ignore_errors: true
 register: check_ssl_is_used
@@ -42,7 +39,6 @@
 tower_organization:
 name: Default
 environment:
-TOWER_HOST: "https://{{ lookup('env', 'TOWER_HOST') }}"
 TOWER_CERTIFICATE: /dev/null # should not fail because verify_ssl is disabled
 always:
 - name: Delete ~/.tower_cli.cfg

@@ -16,20 +16,20 @@
 - name: Update the project (to clone the git repo)
 uri:
-url: "https://{{ lookup('env', 'TOWER_HOST') }}/api/v2/projects/{{ result.id }}/update/"
+url: "https://{{ tower_host }}/api/v2/projects/{{ result.id }}/update/"
 method: POST
-user: "{{ lookup('env', 'TOWER_USERNAME') }}"
-password: "{{ lookup('env', 'TOWER_PASSWORD') }}"
+user: "{{ tower_username }}"
+password: "{{ tower_password }}"
 validate_certs: false
 status_code: 202
 force_basic_auth: true
 - name: Wait for the project to be status=successful
 uri:
-url: "https://{{ lookup('env', 'TOWER_HOST') }}/api/v2/projects/{{ result.id }}/"
+url: "https://{{ tower_host }}/api/v2/projects/{{ result.id }}/"
 method: GET
-user: "{{ lookup('env', 'TOWER_USERNAME') }}"
-password: "{{ lookup('env', 'TOWER_PASSWORD') }}"
+user: "{{ tower_username }}"
+password: "{{ tower_password }}"
 validate_certs: false
 force_basic_auth: true
 return_content: true

@@ -6,7 +6,7 @@
 - assert:
 that:
-- "result.tower_version == '{{ lookup('env', 'TOWER_VERSION') }}'"
+- "result.tower_version == '{{ tower_version }}'"
 - name: Make sure the default Default organization exists
 tower_organization:

@@ -1,8 +1,8 @@
 - name: Fetch project_base_dir
 uri:
-url: "https://{{ lookup('env', 'TOWER_HOST') }}/api/v2/config/"
-user: "{{ lookup('env', 'TOWER_USERNAME') }}"
-password: "{{ lookup('env', 'TOWER_PASSWORD') }}"
+url: "https://{{ tower_host}}/api/v2/config/"
+user: "{{ tower_username }}"
+password: "{{ tower_password }}"
 validate_certs: false
 return_content: true
 register: awx_config

@@ -1,30 +1,30 @@
 - name: Get unified job template ID for Demo Job Template"
 uri:
-url: "https://{{ lookup('env', 'TOWER_HOST') }}/api/v2/unified_job_templates/?name=Demo+Job+Template"
+url: "https://{{ tower_host }}/api/v2/unified_job_templates/?name=Demo+Job+Template"
 method: GET
-password: "{{ lookup('env', 'TOWER_PASSWORD') }}"
-user: "{{ lookup('env', 'TOWER_USERNAME') }}"
+password: "{{ tower_password }}"
+user: "{{ tower_username }}"
 validate_certs: False
 register: unified_job
 - name: Build workflow
 uri:
-url: "https://{{ lookup('env', 'TOWER_HOST') }}/api/v2/workflow_job_templates/"
+url: "https://{{ tower_host }}/api/v2/workflow_job_templates/"
 body:
 name: "Success Template"
 variables: "---"
 extra_vars: ""
 body_format: 'json'
 method: 'POST'
-password: "{{ lookup('env', 'TOWER_PASSWORD') }}"
+password: "{{ tower_password }}"
 status_code: 201
-user: "{{ lookup('env', 'TOWER_USERNAME') }}"
+user: "{{ tower_username }}"
 validate_certs: False
 register: workflow
 - name: Add a node
 uri:
-url: "https://{{ lookup('env', 'TOWER_HOST') }}/api/v2/workflow_job_templates/{{ workflow.json.id }}/workflow_nodes/"
+url: "https://{{ tower_host }}/api/v2/workflow_job_templates/{{ workflow.json.id }}/workflow_nodes/"
 body:
 credential: null
 diff_mode: null
@@ -38,15 +38,15 @@
 verbosity: null
 body_format: 'json'
 method: 'POST'
-password: "{{ lookup('env', 'TOWER_PASSWORD') }}"
+password: "{{ tower_password }}"
 status_code: 201
-user: "{{ lookup('env', 'TOWER_USERNAME') }}"
+user: "{{ tower_username }}"
 validate_certs: False
 register: node1
 - name: Add a node
 uri:
-url: "https://{{ lookup('env', 'TOWER_HOST') }}/api/v2/workflow_job_templates/{{ workflow.json.id }}/workflow_nodes/"
+url: "https://{{ tower_host }}/api/v2/workflow_job_templates/{{ workflow.json.id }}/workflow_nodes/"
 body:
 credential: null
 diff_mode: null
@@ -60,19 +60,19 @@
 verbosity: null
 body_format: 'json'
 method: 'POST'
-password: "{{ lookup('env', 'TOWER_PASSWORD') }}"
+password: "{{ tower_password }}"
 status_code: 201
-user: "{{ lookup('env', 'TOWER_USERNAME') }}"
+user: "{{ tower_username }}"
 validate_certs: False
 register: node2
 - name: "Link nodes {{ node2.json.id }} to {{ node1.json.id }}"
 uri:
-url: "https://{{ lookup('env', 'TOWER_HOST') }}/api/v2/workflow_job_template_nodes/{{ node1.json.id }}/success_nodes/"
+url: "https://{{ tower_host }}/api/v2/workflow_job_template_nodes/{{ node1.json.id }}/success_nodes/"
 body: '{ "id": {{ node2.json.id }} }'
 body_format: 'json'
 method: 'POST'
-password: "{{ lookup('env', 'TOWER_PASSWORD') }}"
+password: "{{ tower_password }}"
 status_code: 204
-user: "{{ lookup('env', 'TOWER_USERNAME') }}"
+user: "{{ tower_username }}"
 validate_certs: False

@@ -17,20 +17,20 @@
 - name: Update the project (to clone the git repo)
 uri:
-url: "https://{{ lookup('env', 'TOWER_HOST') }}/api/v2/projects/{{ result.id }}/update/"
+url: "https://{{ tower_host }}/api/v2/projects/{{ result.id }}/update/"
 method: POST
-user: "{{ lookup('env', 'TOWER_USERNAME') }}"
-password: "{{ lookup('env', 'TOWER_PASSWORD') }}"
+user: "{{ tower_username }}"
+password: "{{ tower_password }}"
 validate_certs: false
 status_code: 202
 force_basic_auth: true
 - name: Wait for the project to be status=successful
 uri:
-url: "https://{{ lookup('env', 'TOWER_HOST') }}/api/v2/projects/{{ result.id }}/"
+url: "https://{{ tower_host }}/api/v2/projects/{{ result.id }}/"
 method: GET
-user: "{{ lookup('env', 'TOWER_USERNAME') }}"
-password: "{{ lookup('env', 'TOWER_PASSWORD') }}"
+user: "{{ tower_username }}"
+password: "{{ tower_password }}"
 validate_certs: false
 force_basic_auth: true
 return_content: true

@@ -253,7 +253,7 @@ class CloudProvider(CloudBase):
 """Base class for cloud provider plugins. Sets up cloud resources before delegation."""
 TEST_DIR = 'test/integration'
-def __init__(self, args, config_extension='.yml'):
+def __init__(self, args, config_extension='.ini'):
 """
 :type args: IntegrationConfig
 :type config_extension: str
@@ -390,10 +390,9 @@ class CloudEnvironment(CloudBase):
 pass
 @abc.abstractmethod
-def configure_environment(self, env, cmd):
-"""Configuration which should be done once for each test target.
-:type env: dict[str, str]
-:type cmd: list[str]
+def get_environment_config(self):
+"""
+:rtype: CloudEnvironmentConfig
 """
 pass
@@ -404,9 +403,17 @@
 """
 pass
-@property
-def inventory_hosts(self):
+class CloudEnvironmentConfig(object):
+"""Configuration for the environment."""
+def __init__(self, env_vars=None, ansible_vars=None, module_defaults=None, callback_plugins=None):
 """
-:rtype: str | None
+:type env_vars: dict[str, str] | None
+:type ansible_vars: dict[str, any] | None
+:type module_defaults: dict[str, dict[str, any]] | None
+:type callback_plugins: list[str] | None
 """
-return None
+self.env_vars = env_vars
+self.ansible_vars = ansible_vars
+self.module_defaults = module_defaults
+self.callback_plugins = callback_plugins
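
The CloudEnvironmentConfig container introduced here replaces the old configure_environment(env, cmd) hook: instead of mutating the command line and process environment in place, each plugin now returns one object describing what the test run needs. A standalone sketch of the shape, with made-up example values:

    # Stand-alone copy of the new container plus an example return value;
    # the key and prefix below are invented.
    class CloudEnvironmentConfig(object):
        """Configuration for the environment."""
        def __init__(self, env_vars=None, ansible_vars=None, module_defaults=None, callback_plugins=None):
            self.env_vars = env_vars                  # exported into the test process environment
            self.ansible_vars = ansible_vars          # exposed to the test play/target as variables
            self.module_defaults = module_defaults    # attached as module_defaults on the generated play
            self.callback_plugins = callback_plugins  # merged into ANSIBLE_CALLBACK_WHITELIST


    def get_environment_config():
        """Shape of the value an environment plugin now returns."""
        return CloudEnvironmentConfig(
            env_vars=dict(EXAMPLE_API_KEY='not-a-real-key'),
            ansible_vars=dict(resource_prefix='ansible-test-example'),
        )


    print(vars(get_environment_config()))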

@@ -7,6 +7,7 @@ import time
 from lib.cloud import (
 CloudProvider,
 CloudEnvironment,
+CloudEnvironmentConfig,
 )
 from lib.util import (
@@ -37,7 +38,7 @@ class ACMEProvider(CloudProvider):
 """
 :type args: TestConfig
 """
-super(ACMEProvider, self).__init__(args, config_extension='.ini')
+super(ACMEProvider, self).__init__(args)
 # The simulator must be pinned to a specific version to guarantee CI passes with the version used.
 if os.environ.get('ANSIBLE_ACME_CONTAINER'):
@@ -175,12 +176,14 @@ class ACMEProvider(CloudProvider):
 class ACMEEnvironment(CloudEnvironment):
 """ACME environment plugin. Updates integration test environment after delegation."""
-def configure_environment(self, env, cmd):
+def get_environment_config(self):
 """
-:type env: dict[str, str]
-:type cmd: list[str]
+:rtype: CloudEnvironmentConfig
 """
-# Send the container IP down to the integration test(s)
-cmd.append('-e')
-cmd.append('acme_host=%s' % self._get_cloud_config('acme_host'))
+ansible_vars = dict(
+acme_host=self._get_cloud_config('acme_host'),
+)
+return CloudEnvironmentConfig(
+ansible_vars=ansible_vars,
+)

@@ -7,11 +7,13 @@ from lib.util import (
 ApplicationError,
 display,
 is_shippable,
+ConfigParser,
 )
 from lib.cloud import (
 CloudProvider,
 CloudEnvironment,
+CloudEnvironmentConfig,
 )
 from lib.core_ci import (
@@ -84,16 +86,22 @@ class AwsCloudProvider(CloudProvider):
 class AwsCloudEnvironment(CloudEnvironment):
 """AWS cloud environment plugin. Updates integration test environment after delegation."""
-def configure_environment(self, env, cmd):
+def get_environment_config(self):
 """
-:type env: dict[str, str]
-:type cmd: list[str]
+:rtype: CloudEnvironmentConfig
 """
-cmd.append('-e')
-cmd.append('@%s' % self.config_path)
-cmd.append('-e')
-cmd.append('resource_prefix=%s' % self.resource_prefix)
+parser = ConfigParser()
+parser.read(self.config_path)
+ansible_vars = dict(
+resource_prefix=self.resource_prefix,
+)
+ansible_vars.update(dict(parser.items('default')))
+return CloudEnvironmentConfig(
+ansible_vars=ansible_vars,
+)
 def on_failure(self, target, tries):
 """

@@ -7,11 +7,13 @@ from lib.util import (
 ApplicationError,
 display,
 is_shippable,
+ConfigParser,
 )
 from lib.cloud import (
 CloudProvider,
 CloudEnvironment,
+CloudEnvironmentConfig,
 )
 from lib.http import (
@@ -124,6 +126,8 @@ class AzureCloudProvider(CloudProvider):
 config = '\n'.join('%s: %s' % (key, values[key]) for key in sorted(values))
+config = '[default]\n' + config
 self._write_config(config)
 def _create_ansible_core_ci(self):
@@ -135,22 +139,22 @@ class AzureCloudProvider(CloudProvider):
 class AzureCloudEnvironment(CloudEnvironment):
 """Azure cloud environment plugin. Updates integration test environment after delegation."""
-def configure_environment(self, env, cmd):
+def get_environment_config(self):
 """
-:type env: dict[str, str]
-:type cmd: list[str]
+:rtype: CloudEnvironmentConfig
 """
-config = get_config(self.config_path)
-cmd.append('-e')
-cmd.append('resource_prefix=%s' % self.resource_prefix)
-cmd.append('-e')
-cmd.append('resource_group=%s' % config['RESOURCE_GROUP'])
-cmd.append('-e')
-cmd.append('resource_group_secondary=%s' % config['RESOURCE_GROUP_SECONDARY'])
-for key in config:
-env[key] = config[key]
+env_vars = get_config(self.config_path)
+ansible_vars = dict(
+resource_prefix=self.resource_prefix,
+)
+ansible_vars.update(dict((key.lower(), value) for key, value in env_vars.items()))
+return CloudEnvironmentConfig(
+env_vars=env_vars,
+ansible_vars=ansible_vars,
+)
 def on_failure(self, target, tries):
 """
@@ -167,9 +171,10 @@ def get_config(config_path):
 :type config_path: str
 :rtype: dict[str, str]
 """
-with open(config_path, 'r') as config_fd:
-lines = [line for line in config_fd.read().splitlines() if ':' in line and line.strip() and not line.strip().startswith('#')]
-config = dict((kvp[0].strip(), kvp[1].strip()) for kvp in [line.split(':', 1) for line in lines])
+parser = ConfigParser()
+parser.read(config_path)
+config = dict((key.upper(), value) for key, value in parser.items('default'))
 rg_vars = (
 'RESOURCE_GROUP',

@@ -11,6 +11,7 @@ from os.path import isfile
 from lib.cloud import (
 CloudProvider,
 CloudEnvironment,
+CloudEnvironmentConfig,
 )
 from lib.util import ConfigParser, display
@@ -25,7 +26,7 @@ class CloudscaleCloudProvider(CloudProvider):
 """
 :type args: TestConfig
 """
-super(CloudscaleCloudProvider, self).__init__(args, config_extension='.ini')
+super(CloudscaleCloudProvider, self).__init__(args)
 def filter(self, targets, exclude):
 """Filter out the cloud tests when the necessary config and resources are not available.
@@ -38,6 +39,7 @@ class CloudscaleCloudProvider(CloudProvider):
 super(CloudscaleCloudProvider, self).filter(targets, exclude)
 def setup(self):
+"""Setup the cloud resource before delegation and register a cleanup callback."""
 super(CloudscaleCloudProvider, self).setup()
 if isfile(self.config_static_path):
@@ -46,28 +48,30 @@ class CloudscaleCloudProvider(CloudProvider):
 verbosity=1)
 self.config_path = self.config_static_path
 self.managed = False
+return True
+return False
 class CloudscaleCloudEnvironment(CloudEnvironment):
 """Cloudscale cloud environment plugin. Updates integration test environment
 after delegation.
 """
-def configure_environment(self, env, cmd):
+def get_environment_config(self):
 """
-:type env: dict[str, str]
-:type cmd: list[str]
+:rtype: CloudEnvironmentConfig
 """
 parser = ConfigParser()
 parser.read(self.config_path)
-changes = dict(
+env_vars = dict(
 CLOUDSCALE_API_TOKEN=parser.get('default', 'cloudscale_api_token'),
 )
-env.update(changes)
-cmd.append('-e')
-cmd.append('cloudscale_resource_prefix=%s' % self.resource_prefix)
+ansible_vars = dict(
+cloudscale_resource_prefix=self.resource_prefix,
+)
+ansible_vars.update(dict((key.lower(), value) for key, value in env_vars.items()))
+return CloudEnvironmentConfig(
+env_vars=env_vars,
+ansible_vars=ansible_vars,
+)
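
Several plugins (cloudscale above, azure and tower elsewhere in this commit) follow the same pattern: the old environment-variable names stay in env_vars for backwards compatibility, and the same values are re-exposed as lowercased Ansible variables in ansible_vars, which is what lets the task files earlier in this commit drop lookup('env', ...). A small sketch with placeholder values:

    # Placeholder values; shows the env_vars -> ansible_vars lowering used above.
    env_vars = dict(
        CLOUDSCALE_API_TOKEN='dummy-token',
    )

    ansible_vars = dict(
        cloudscale_resource_prefix='ansible-test-example',
    )
    ansible_vars.update(dict((key.lower(), value) for key, value in env_vars.items()))

    # {'cloudscale_resource_prefix': 'ansible-test-example', 'cloudscale_api_token': 'dummy-token'}
    print(ansible_vars)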

@@ -9,6 +9,7 @@ import time
 from lib.cloud import (
 CloudProvider,
 CloudEnvironment,
+CloudEnvironmentConfig,
 )
 from lib.util import (
@@ -45,7 +46,7 @@ class CsCloudProvider(CloudProvider):
 """
 :type args: TestConfig
 """
-super(CsCloudProvider, self).__init__(args, config_extension='.ini')
+super(CsCloudProvider, self).__init__(args)
 # The simulator must be pinned to a specific version to guarantee CI passes with the version used.
 self.image = 'quay.io/ansible/cloudstack-test-container:1.2.0'
@@ -262,16 +263,27 @@ class CsCloudProvider(CloudProvider):
 class CsCloudEnvironment(CloudEnvironment):
 """CloudStack cloud environment plugin. Updates integration test environment after delegation."""
-def configure_environment(self, env, cmd):
+def get_environment_config(self):
 """
-:type env: dict[str, str]
-:type cmd: list[str]
+:rtype: CloudEnvironmentConfig
 """
-changes = dict(
-CLOUDSTACK_CONFIG=self.config_path,
+parser = ConfigParser()
+parser.read(self.config_path)
+config = dict(parser.items('default'))
+env_vars = dict(
+CLOUDSTACK_ENDPOINT=config['endpoint'],
+CLOUDSTACK_KEY=config['key'],
+CLOUDSTACK_SECRET=config['secret'],
+CLOUDSTACK_TIMEOUT=config['timeout'],
 )
-env.update(changes)
-cmd.append('-e')
-cmd.append('cs_resource_prefix=%s' % self.resource_prefix)
+ansible_vars = dict(
+cs_resource_prefix=self.resource_prefix,
+)
+return CloudEnvironmentConfig(
+env_vars=env_vars,
+ansible_vars=ansible_vars,
+)

@@ -7,6 +7,7 @@ import os
 from . import (
 CloudProvider,
 CloudEnvironment,
+CloudEnvironmentConfig,
 )
 from ..util import (
@@ -175,13 +176,15 @@ class ForemanEnvironment(CloudEnvironment):
 Updates integration test environment after delegation.
 """
-def configure_environment(self, env, cmd):
+def get_environment_config(self):
 """
-:type env: dict[str, str]
-:type cmd: list[str]
+:rtype: CloudEnvironmentConfig
 """
-# Send the container IP down to the integration test(s)
-env['FOREMAN_HOST'] = self._get_cloud_config('FOREMAN_HOST')
-env['FOREMAN_PORT'] = self._get_cloud_config('FOREMAN_PORT')
+env_vars = dict(
+FOREMAN_HOST=self._get_cloud_config('FOREMAN_HOST'),
+FOREMAN_PORT=self._get_cloud_config('FOREMAN_PORT'),
+)
+return CloudEnvironmentConfig(
+env_vars=env_vars,
+)

@@ -7,11 +7,13 @@ import os
 from lib.util import (
 display,
+ConfigParser,
 )
 from lib.cloud import (
 CloudProvider,
 CloudEnvironment,
+CloudEnvironmentConfig,
 )
@@ -41,22 +43,19 @@ class GcpCloudProvider(CloudProvider):
 class GcpCloudEnvironment(CloudEnvironment):
 """GCP cloud environment plugin. Updates integration test environment after delegation."""
-def configure_environment(self, env, cmd):
+def get_environment_config(self):
 """
-:type env: dict[str, str]
-:type cmd: list[str]
+:rtype: CloudEnvironmentConfig
 """
-cmd.append('-e')
-cmd.append('@%s' % self.config_path)
-cmd.append('-e')
-cmd.append('resource_prefix=%s' % self.resource_prefix)
-def on_failure(self, target, tries):
-"""
-:type target: TestTarget
-:type tries: int
-"""
-if not tries and self.managed:
-display.notice('%s failed' % target.name)
+parser = ConfigParser()
+parser.read(self.config_path)
+ansible_vars = dict(
+resource_prefix=self.resource_prefix,
+)
+ansible_vars.update(dict(parser.items('default')))
+return CloudEnvironmentConfig(
+ansible_vars=ansible_vars,
+)

@@ -7,6 +7,7 @@ import os
 from . import (
 CloudProvider,
 CloudEnvironment,
+CloudEnvironmentConfig,
 )
 from ..util import (
@@ -174,12 +175,18 @@ class NiosEnvironment(CloudEnvironment):
 Updates integration test environment after delegation.
 """
-def configure_environment(self, env, cmd):
+def get_environment_config(self):
 """
-:type env: dict[str, str]
-:type cmd: list[str]
+:rtype: CloudEnvironmentConfig
 """
-# Send the container IP down to the integration test(s)
-env['NIOS_HOST'] = self._get_cloud_config('NIOS_HOST')
+ansible_vars = dict(
+nios_provider=dict(
+host=self._get_cloud_config('NIOS_HOST'),
+username='admin',
+password='infoblox',
+),
+)
+return CloudEnvironmentConfig(
+ansible_vars=ansible_vars,
+)

@@ -2,11 +2,13 @@
 from lib.cloud import (
 CloudProvider,
-CloudEnvironment
+CloudEnvironment,
+CloudEnvironmentConfig,
 )
 from lib.util import (
 display,
+ConfigParser,
 )
@@ -43,14 +45,19 @@ class OpenNebulaCloudEnvironment(CloudEnvironment):
 """
 Updates integration test environment after delegation. Will setup the config file as parameter.
 """
-def configure_environment(self, env, cmd):
+def get_environment_config(self):
 """
-:type env: dict[str, str]
-:type cmd: list[str]
+:rtype: CloudEnvironmentConfig
 """
-cmd.append('-e')
-cmd.append('@%s' % self.config_path)
-cmd.append('-e')
-cmd.append('resource_prefix=%s' % self.resource_prefix)
+parser = ConfigParser()
+parser.read(self.config_path)
+ansible_vars = dict(
+resource_prefix=self.resource_prefix,
+)
+ansible_vars.update(dict(parser.items('default')))
+return CloudEnvironmentConfig(
+ansible_vars=ansible_vars,
+)

@@ -9,6 +9,7 @@ import time
 from lib.cloud import (
 CloudProvider,
 CloudEnvironment,
+CloudEnvironmentConfig,
 )
 from lib.util import (
@@ -211,13 +212,14 @@ class OpenShiftCloudProvider(CloudProvider):
 class OpenShiftCloudEnvironment(CloudEnvironment):
 """OpenShift cloud environment plugin. Updates integration test environment after delegation."""
-def configure_environment(self, env, cmd):
+def get_environment_config(self):
 """
-:type env: dict[str, str]
-:type cmd: list[str]
+:rtype: CloudEnvironmentConfig
 """
-changes = dict(
+env_vars = dict(
 K8S_AUTH_KUBECONFIG=self.config_path,
 )
-env.update(changes)
+return CloudEnvironmentConfig(
+env_vars=env_vars,
+)

@@ -16,6 +16,7 @@ from lib.util import (
 from lib.cloud import (
 CloudProvider,
 CloudEnvironment,
+CloudEnvironmentConfig,
 )
 from lib.core_ci import (
@@ -29,7 +30,7 @@ class TowerCloudProvider(CloudProvider):
 """
 :type args: TestConfig
 """
-super(TowerCloudProvider, self).__init__(args, config_extension='.cfg')
+super(TowerCloudProvider, self).__init__(args)
 self.aci = None
 self.version = ''
@@ -162,14 +163,20 @@ class TowerCloudEnvironment(CloudEnvironment):
 time.sleep(5)
-def configure_environment(self, env, cmd):
-"""Configuration which should be done once for each test target.
-:type env: dict[str, str]
-:type cmd: list[str]
+def get_environment_config(self):
+"""
+:rtype: CloudEnvironmentConfig
 """
 config = TowerConfig.parse(self.config_path)
-env.update(config.environment)
+env_vars = config.environment
+ansible_vars = dict((key.lower(), value) for key, value in env_vars.items())
+return CloudEnvironmentConfig(
+env_vars=env_vars,
+ansible_vars=ansible_vars,
+)
 class TowerConfig(object):
@@ -213,7 +220,7 @@ class TowerConfig(object):
 'password',
 )
-values = dict((k, parser.get('general', k)) for k in keys)
+values = dict((k, parser.get('default', k)) for k in keys)
 config = TowerConfig(values)
 missing = [k for k in keys if not values.get(k)]

@@ -6,6 +6,7 @@ import os
 from lib.cloud import (
 CloudProvider,
 CloudEnvironment,
+CloudEnvironmentConfig,
 )
 from lib.util import (
@@ -30,7 +31,7 @@ class VcenterProvider(CloudProvider):
 """
 :type args: TestConfig
 """
-super(VcenterProvider, self).__init__(args, config_extension='.ini')
+super(VcenterProvider, self).__init__(args)
 # The simulator must be pinned to a specific version to guarantee CI passes with the version used.
 if os.environ.get('ANSIBLE_VCSIM_CONTAINER'):
@@ -144,13 +145,19 @@ class VcenterProvider(CloudProvider):
 class VcenterEnvironment(CloudEnvironment):
 """VMware vcenter/esx environment plugin. Updates integration test environment after delegation."""
-def configure_environment(self, env, cmd):
+def get_environment_config(self):
 """
-:type env: dict[str, str]
-:type cmd: list[str]
+:rtype: CloudEnvironmentConfig
 """
-cmd.append('-e')
-cmd.append('vcsim=%s' % self._get_cloud_config('vcenter_host'))
-# Send the container IP down to the integration test(s)
-env['VCENTER_HOST'] = self._get_cloud_config('vcenter_host')
+env_vars = dict(
+VCENTER_HOST=self._get_cloud_config('vcenter_host'),
+)
+ansible_vars = dict(
+vcsim=self._get_cloud_config('vcenter_host'),
+)
+return CloudEnvironmentConfig(
+env_vars=env_vars,
+ansible_vars=ansible_vars,
+)

@@ -5,7 +5,8 @@ import os
 from lib.cloud import (
 CloudProvider,
-CloudEnvironment
+CloudEnvironment,
+CloudEnvironmentConfig,
 )
 from lib.util import ConfigParser
@@ -18,7 +19,7 @@ class VultrCloudProvider(CloudProvider):
 """
 :type args: TestConfig
 """
-super(VultrCloudProvider, self).__init__(args, config_extension='.ini')
+super(VultrCloudProvider, self).__init__(args)
 def filter(self, targets, exclude):
 """Filter out the cloud tests when the necessary config and resources are not available.
@@ -31,32 +32,34 @@ class VultrCloudProvider(CloudProvider):
 super(VultrCloudProvider, self).filter(targets, exclude)
 def setup(self):
+"""Setup the cloud resource before delegation and register a cleanup callback."""
 super(VultrCloudProvider, self).setup()
 if os.path.isfile(self.config_static_path):
 self.config_path = self.config_static_path
 self.managed = False
+return True
+return False
 class VultrCloudEnvironment(CloudEnvironment):
 """
 Updates integration test environment after delegation. Will setup the config file as parameter.
 """
-def configure_environment(self, env, cmd):
+def get_environment_config(self):
 """
-:type env: dict[str, str]
-:type cmd: list[str]
+:rtype: CloudEnvironmentConfig
 """
 parser = ConfigParser()
 parser.read(self.config_path)
-changes = dict(
+env_vars = dict(
 VULTR_API_KEY=parser.get('default', 'key'),
 )
-env.update(changes)
-cmd.append('-e')
-cmd.append('vultr_resource_prefix=%s' % self.resource_prefix)
+ansible_vars = dict(
+vultr_resource_prefix=self.resource_prefix,
+)
+return CloudEnvironmentConfig(
+env_vars=env_vars,
+ansible_vars=ansible_vars,
+)

@@ -34,6 +34,7 @@ from lib.cloud import (
 cloud_init,
 get_cloud_environment,
 get_cloud_platforms,
+CloudEnvironmentConfig,
 )
 from lib.util import (
@@ -110,6 +111,7 @@ from lib.metadata import (
 from lib.integration import (
 integration_test_environment,
+integration_test_config_file,
 )
 SUPPORTED_PYTHON_VERSIONS = (
@@ -1107,14 +1109,14 @@ def run_setup_targets(args, test_dir, target_names, targets_dict, targets_execut
 targets_executed.add(target_name)
-def integration_environment(args, target, cmd, test_dir, inventory_path, ansible_config):
+def integration_environment(args, target, test_dir, inventory_path, ansible_config, env_config):
 """
 :type args: IntegrationConfig
 :type target: IntegrationTarget
-:type cmd: list[str]
 :type test_dir: str
 :type inventory_path: str
 :type ansible_config: str | None
+:type env_config: CloudEnvironmentConfig | None
 :rtype: dict[str, str]
 """
 env = ansible_environment(args, ansible_config=ansible_config)
@@ -1124,9 +1126,11 @@ def integration_environment(args, target, cmd, test_dir, inventory_path, ansible
 HTTPTESTER='1',
 ))
+callback_plugins = ['junit'] + (env_config.callback_plugins or [] if env_config else [])
 integration = dict(
 JUNIT_OUTPUT_DIR=os.path.abspath('test/results/junit'),
-ANSIBLE_CALLBACK_WHITELIST='junit',
+ANSIBLE_CALLBACK_WHITELIST=','.join(sorted(set(callback_plugins))),
 ANSIBLE_TEST_CI=args.metadata.ci_provider,
 OUTPUT_DIR=test_dir,
 INVENTORY_PATH=os.path.abspath(inventory_path),
@@ -1143,11 +1147,6 @@ def integration_environment(args, target, cmd, test_dir, inventory_path, ansible
 env.update(integration)
-cloud_environment = get_cloud_environment(args, target)
-if cloud_environment:
-cloud_environment.configure_environment(env, cmd)
 return env
@@ -1160,16 +1159,31 @@ def command_integration_script(args, target, test_dir, inventory_path):
 """
 display.info('Running %s integration test script' % target.name)
+env_config = None
+if isinstance(args, PosixIntegrationConfig):
+cloud_environment = get_cloud_environment(args, target)
+if cloud_environment:
+env_config = cloud_environment.get_environment_config()
 with integration_test_environment(args, target, inventory_path) as test_env:
 cmd = ['./%s' % os.path.basename(target.script_path)]
 if args.verbosity:
 cmd.append('-' + ('v' * args.verbosity))
-env = integration_environment(args, target, cmd, test_dir, test_env.inventory_path, test_env.ansible_config)
+env = integration_environment(args, target, test_dir, test_env.inventory_path, test_env.ansible_config, env_config)
 cwd = os.path.join(test_env.integration_dir, 'targets', target.name)
-intercept_command(args, cmd, target_name=target.name, env=env, cwd=cwd)
+if env_config and env_config.env_vars:
+env.update(env_config.env_vars)
+with integration_test_config_file(args, env_config, test_env.integration_dir) as config_path:
+if config_path:
+cmd += ['-e', '@%s' % config_path]
+intercept_command(args, cmd, target_name=target.name, env=env, cwd=cwd)
 def command_integration_role(args, target, start_at_task, test_dir, inventory_path):
@@ -1182,6 +1196,8 @@ def command_integration_role(args, target, start_at_task, test_dir, inventory_pa
 """
 display.info('Running %s integration test role' % target.name)
+env_config = None
 if isinstance(args, WindowsIntegrationConfig):
 hosts = 'windows'
 gather_facts = False
@@ -1195,22 +1211,35 @@ def command_integration_role(args, target, start_at_task, test_dir, inventory_pa
 cloud_environment = get_cloud_environment(args, target)
 if cloud_environment:
-hosts = cloud_environment.inventory_hosts or hosts
-playbook = '''
-- hosts: %s
-gather_facts: %s
-roles:
-- { role: %s }
-''' % (hosts, gather_facts, target.name)
+env_config = cloud_environment.get_environment_config()
 with integration_test_environment(args, target, inventory_path) as test_env:
+play = dict(
+hosts=hosts,
+gather_facts=gather_facts,
+vars_files=[
+test_env.vars_file,
+],
+roles=[
+target.name,
+],
+)
+if env_config:
+play.update(dict(
+vars=env_config.ansible_vars,
+environment=env_config.env_vars,
+module_defaults=env_config.module_defaults,
+))
+playbook = json.dumps([play], indent=4, sort_keys=True)
 with named_temporary_file(args=args, directory=test_env.integration_dir, prefix='%s-' % target.name, suffix='.yml', content=playbook) as playbook_path:
 filename = os.path.basename(playbook_path)
 display.info('>>> Playbook: %s\n%s' % (filename, playbook.strip()), verbosity=3)
-cmd = ['ansible-playbook', filename, '-i', test_env.inventory_path, '-e', '@%s' % test_env.vars_file]
+cmd = ['ansible-playbook', filename, '-i', test_env.inventory_path]
 if start_at_task:
 cmd += ['--start-at-task', start_at_task]
@@ -1231,7 +1260,7 @@ def command_integration_role(args, target, start_at_task, test_dir, inventory_pa
 if args.verbosity:
 cmd.append('-' + ('v' * args.verbosity))
-env = integration_environment(args, target, cmd, test_dir, test_env.inventory_path, test_env.ansible_config)
+env = integration_environment(args, target, test_dir, test_env.inventory_path, test_env.ansible_config, env_config)
 cwd = test_env.integration_dir
 env['ANSIBLE_ROLES_PATH'] = os.path.abspath(os.path.join(test_env.integration_dir, 'targets'))
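
With the role playbook now built as a data structure and serialized via json.dumps (JSON being a subset of YAML), the cloud plugin's variables, environment and module_defaults can be attached directly to the generated play. A rough sketch of what that produces, with invented values standing in for the ansible-test internals:

    # Invented values; mirrors the play-building logic above without the ansible-test internals.
    import json

    play = dict(
        hosts='testhost',
        gather_facts=True,
        vars_files=['integration_config.yml'],
        roles=['example_target'],
    )

    # Stand-ins for env_config.ansible_vars / env_vars / module_defaults:
    play.update(dict(
        vars=dict(resource_prefix='ansible-test-example'),
        environment=dict(EXAMPLE_HOST='127.0.0.1'),
        module_defaults=None,
    ))

    # JSON is valid YAML, so the result can be written to a .yml file and fed to ansible-playbook.
    print(json.dumps([play], indent=4, sort_keys=True))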

@@ -3,6 +3,7 @@
 from __future__ import absolute_import, print_function
 import contextlib
+import json
 import os
 import shutil
 import tempfile
@@ -22,12 +23,17 @@ from lib.util import (
 ApplicationError,
 display,
 make_dirs,
+named_temporary_file,
 )
 from lib.cache import (
 CommonCache,
 )
+from lib.cloud import (
+CloudEnvironmentConfig,
+)
 def generate_dependency_map(integration_targets):
 """
@@ -188,6 +194,36 @@ def integration_test_environment(args, target, inventory_path):
 shutil.rmtree(temp_dir)
+@contextlib.contextmanager
+def integration_test_config_file(args, env_config, integration_dir):
+"""
+:type args: IntegrationConfig
+:type env_config: CloudEnvironmentConfig
+:type integration_dir: str
+"""
+if not env_config:
+yield None
+return
+config_vars = (env_config.ansible_vars or {}).copy()
+config_vars.update(dict(
+ansible_test=dict(
+environment=env_config.env_vars,
+module_defaults=env_config.module_defaults,
+)
+))
+config_file = json.dumps(config_vars, indent=4, sort_keys=True)
+with named_temporary_file(args, 'config-file-', '.json', integration_dir, config_file) as path:
+filename = os.path.relpath(path, integration_dir)
+display.info('>>> Config File: %s\n%s' % (filename, config_file), verbosity=3)
+yield path
 class IntegrationEnvironment(object):
 """Details about the integration environment."""
 def __init__(self, integration_dir, inventory_path, ansible_config, vars_file):
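
The new integration_test_config_file context manager writes the cloud plugin's settings to a temporary JSON file that is passed to the test with -e @<file>. A sketch of the file's shape, using invented values in place of a real env_config:

    # Invented values standing in for env_config; shows the structure of the generated file.
    import json

    ansible_vars = dict(acme_host='127.0.0.1')
    env_vars = dict(ACME_HOST='127.0.0.1')
    module_defaults = None

    config_vars = dict(ansible_vars)
    config_vars.update(dict(
        ansible_test=dict(
            environment=env_vars,
            module_defaults=module_defaults,
        )
    ))

    print(json.dumps(config_vars, indent=4, sort_keys=True))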
