Resolves an issue with the vmware_host module under Ansible 2.0

When this module was written back in May 2015 we were on Ansible 1.9.x. Out of laziness I stashed the objects the other functions would need (content, cluster, host) in `module.params`. In 2.0, `exit_json` tries to jsonify those complex pyVmomi objects and fails. This PR removes that pattern from the vmware_host module and resolves the failure.
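For context, here is a minimal sketch of the failure mode. It uses a plain stand-in class rather than a real `vim.HostSystem` managed object, and it is an illustration only, not code from the module: anything non-serializable left in the dict handed to `exit_json` breaks jsonify, so the fix keeps those objects as instance attributes and returns only primitives.

```python
import json


class HostSystem(object):
    """Stand-in for a pyVmomi managed object such as vim.HostSystem."""
    pass


# 1.9.x-era pattern: stash the live object so later functions can reach it.
result = {'changed': True, 'host': HostSystem()}

try:
    json.dumps(result)  # roughly what exit_json does when serializing results
except TypeError as error:
    print("jsonify fails: %s" % error)

# The fix: hand exit_json only primitives (e.g. a string representation),
# and keep the managed object itself on the class instance instead.
print(json.dumps({'changed': True, 'result': str(result['host'])}))
```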

@kamsz reported this issue in https://github.com/ansible/ansible-modules-extras/pull/1568

Playbook
```
    - name: Add Host
      local_action:
        module: vmware_host
        hostname: "{{ mgmt_ip_address }}"
        username: "{{ vcsa_user }}"
        password: "{{ vcsa_pass }}"
        datacenter_name: "{{ mgmt_vdc }}"
        cluster_name: "{{ mgmt_cluster }}"
        esxi_hostname: "{{ hostvars[item].hostname }}"
        esxi_username: "{{ esxi_username }}"
        esxi_password: "{{ site_passwd }}"
        state: present
      with_items: groups['foundation_esxi']
```

Module Testing
```
TASK [Add Host] ****************************************************************
task path: /opt/autodeploy/projects/emmet/site_deploy.yml:214
ESTABLISH LOCAL CONNECTION FOR USER: root
localhost EXEC ( umask 22 && mkdir -p "$( echo $HOME/.ansible/tmp/ansible-tmp-1454693866.1-87710459703937 )" && echo "$( echo $HOME/.ansible/tmp/ansible-tmp-1454693866.1-87710459703937 )" )
localhost PUT /tmp/tmppmr9i9 TO /root/.ansible/tmp/ansible-tmp-1454693866.1-87710459703937/vmware_host
localhost EXEC LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 /usr/bin/python /root/.ansible/tmp/ansible-tmp-1454693866.1-87710459703937/vmware_host; rm -rf "/root/.ansible/tmp/ansible-tmp-1454693866.1-87710459703937/" > /dev/null 2>&1
localhost EXEC ( umask 22 && mkdir -p "$( echo $HOME/.ansible/tmp/ansible-tmp-1454693943.8-75870536677834 )" && echo "$( echo $HOME/.ansible/tmp/ansible-tmp-1454693943.8-75870536677834 )" )
localhost PUT /tmp/tmpVB81f2 TO /root/.ansible/tmp/ansible-tmp-1454693943.8-75870536677834/vmware_host
localhost EXEC LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 /usr/bin/python /root/.ansible/tmp/ansible-tmp-1454693943.8-75870536677834/vmware_host; rm -rf "/root/.ansible/tmp/ansible-tmp-1454693943.8-75870536677834/" > /dev/null 2>&1
localhost EXEC ( umask 22 && mkdir -p "$( echo $HOME/.ansible/tmp/ansible-tmp-1454693991.56-163414752982563 )" && echo "$( echo $HOME/.ansible/tmp/ansible-tmp-1454693991.56-163414752982563 )" )
localhost PUT /tmp/tmpFB7VQB TO /root/.ansible/tmp/ansible-tmp-1454693991.56-163414752982563/vmware_host
localhost EXEC LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 /usr/bin/python /root/.ansible/tmp/ansible-tmp-1454693991.56-163414752982563/vmware_host; rm -rf "/root/.ansible/tmp/ansible-tmp-1454693991.56-163414752982563/" > /dev/null 2>&1
changed: [foundation-vcsa -> localhost] => (item=foundation-esxi-01) => {"changed": true, "invocation": {"module_args": {"cluster_name": "Foundation", "datacenter_name": "Test-Lab", "esxi_hostname": "cscesxtmp001", "esxi_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "esxi_username": "root", "hostname": "172.27.0.100", "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "state": "present", "username": "root"}, "module_name": "vmware_host"}, "item": "foundation-esxi-01", "result": "'vim.HostSystem:host-15'"}
changed: [foundation-vcsa -> localhost] => (item=foundation-esxi-02) => {"changed": true, "invocation": {"module_args": {"cluster_name": "Foundation", "datacenter_name": "Test-Lab", "esxi_hostname": "cscesxtmp002", "esxi_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "esxi_username": "root", "hostname": "172.27.0.100", "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "state": "present", "username": "root"}, "module_name": "vmware_host"}, "item": "foundation-esxi-02", "result": "'vim.HostSystem:host-20'"}
changed: [foundation-vcsa -> localhost] => (item=foundation-esxi-03) => {"changed": true, "invocation": {"module_args": {"cluster_name": "Foundation", "datacenter_name": "Test-Lab", "esxi_hostname": "cscesxtmp003", "esxi_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "esxi_username": "root", "hostname": "172.27.0.100", "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "state": "present", "username": "root"}, "module_name": "vmware_host"}, "item": "foundation-esxi-03", "result": "'vim.HostSystem:host-21'"}

```

Module changes
```diff
@@ -87,102 +87,118 @@ except ImportError:
     HAS_PYVMOMI = False
 
-def find_host_by_cluster_datacenter(module):
-    datacenter_name = module.params['datacenter_name']
-    cluster_name = module.params['cluster_name']
-    content = module.params['content']
-    esxi_hostname = module.params['esxi_hostname']
-
-    dc = find_datacenter_by_name(content, datacenter_name)
-    cluster = find_cluster_by_name_datacenter(dc, cluster_name)
-
-    for host in cluster.host:
-        if host.name == esxi_hostname:
-            return host, cluster
-
-    return None, cluster
-
-
-def add_host_to_vcenter(module):
-    cluster = module.params['cluster']
-
-    host_connect_spec = vim.host.ConnectSpec()
-    host_connect_spec.hostName = module.params['esxi_hostname']
-    host_connect_spec.userName = module.params['esxi_username']
-    host_connect_spec.password = module.params['esxi_password']
-    host_connect_spec.force = True
-    host_connect_spec.sslThumbprint = ""
-    as_connected = True
-    esxi_license = None
-    resource_pool = None
-
-    try:
-        task = cluster.AddHost_Task(host_connect_spec, as_connected, resource_pool, esxi_license)
-        success, result = wait_for_task(task)
-        return success, result
-    except TaskError as add_task_error:
-        # This is almost certain to fail the first time.
-        # In order to get the sslThumbprint we first connect
-        # get the vim.fault.SSLVerifyFault then grab the sslThumbprint
-        # from that object.
-        #
-        # args is a tuple, selecting the first tuple
-        ssl_verify_fault = add_task_error.args[0]
-        host_connect_spec.sslThumbprint = ssl_verify_fault.thumbprint
-
-    task = cluster.AddHost_Task(host_connect_spec, as_connected, resource_pool, esxi_license)
-    success, result = wait_for_task(task)
-    return success, result
-
-
-def state_exit_unchanged(module):
-    module.exit_json(changed=False)
-
-
-def state_remove_host(module):
-    host = module.params['host']
-    changed = True
-    result = None
-    if not module.check_mode:
-        if not host.runtime.inMaintenanceMode:
-            maintenance_mode_task = host.EnterMaintenanceMode_Task(300, True, None)
-            changed, result = wait_for_task(maintenance_mode_task)
-        if changed:
-            task = host.Destroy_Task()
-            changed, result = wait_for_task(task)
-        else:
-            raise Exception(result)
-    module.exit_json(changed=changed, result=str(result))
-
-
-def state_update_host(module):
-    module.exit_json(changed=False, msg="Currently not implemented.")
-
-
-def state_add_host(module):
-    changed = True
-    result = None
-
-    if not module.check_mode:
-        changed, result = add_host_to_vcenter(module)
-    module.exit_json(changed=changed, result=str(result))
-
-
-def check_host_state(module):
-    content = connect_to_api(module)
-    module.params['content'] = content
-
-    host, cluster = find_host_by_cluster_datacenter(module)
-
-    module.params['cluster'] = cluster
-    if host is None:
-        return 'absent'
-    else:
-        module.params['host'] = host
-        return 'present'
+class VMwareHost(object):
+    def __init__(self, module):
+        self.module = module
+        self.datacenter_name = module.params['datacenter_name']
+        self.cluster_name = module.params['cluster_name']
+        self.esxi_hostname = module.params['esxi_hostname']
+        self.esxi_username = module.params['esxi_username']
+        self.esxi_password = module.params['esxi_password']
+        self.state = module.params['state']
+        self.dc = None
+        self.cluster = None
+        self.host = None
+        self.content = connect_to_api(module)
+
+    def process_state(self):
+        try:
+            # Currently state_update_dvs is not implemented.
+            host_states = {
+                'absent': {
+                    'present': self.state_remove_host,
+                    'absent': self.state_exit_unchanged,
+                },
+                'present': {
+                    'present': self.state_exit_unchanged,
+                    'absent': self.state_add_host,
+                }
+            }
+
+            host_states[self.state][self.check_host_state()]()
+
+        except vmodl.RuntimeFault as runtime_fault:
+            self.module.fail_json(msg=runtime_fault.msg)
+        except vmodl.MethodFault as method_fault:
+            self.module.fail_json(msg=method_fault.msg)
+        except Exception as e:
+            self.module.fail_json(msg=str(e))
+
+    def find_host_by_cluster_datacenter(self):
+        self.dc = find_datacenter_by_name(self.content, self.datacenter_name)
+        self.cluster = find_cluster_by_name_datacenter(self.dc, self.cluster_name)
+
+        for host in self.cluster.host:
+            if host.name == self.esxi_hostname:
+                return host, self.cluster
+
+        return None, self.cluster
+
+    def add_host_to_vcenter(self):
+        host_connect_spec = vim.host.ConnectSpec()
+        host_connect_spec.hostName = self.esxi_hostname
+        host_connect_spec.userName = self.esxi_username
+        host_connect_spec.password = self.esxi_password
+        host_connect_spec.force = True
+        host_connect_spec.sslThumbprint = ""
+        as_connected = True
+        esxi_license = None
+        resource_pool = None
+
+        try:
+            task = self.cluster.AddHost_Task(host_connect_spec, as_connected, resource_pool, esxi_license)
+            success, result = wait_for_task(task)
+            return success, result
+        except TaskError as add_task_error:
+            # This is almost certain to fail the first time.
+            # In order to get the sslThumbprint we first connect
+            # get the vim.fault.SSLVerifyFault then grab the sslThumbprint
+            # from that object.
+            #
+            # args is a tuple, selecting the first tuple
+            ssl_verify_fault = add_task_error.args[0]
+            host_connect_spec.sslThumbprint = ssl_verify_fault.thumbprint
+
+        task = self.cluster.AddHost_Task(host_connect_spec, as_connected, resource_pool, esxi_license)
+        success, result = wait_for_task(task)
+        return success, result
+
+    def state_exit_unchanged(self):
+        self.module.exit_json(changed=False)
+
+    def state_remove_host(self):
+        changed = True
+        result = None
+        if not self.module.check_mode:
+            if not self.host.runtime.inMaintenanceMode:
+                maintenance_mode_task = self.host.EnterMaintenanceMode_Task(300, True, None)
+                changed, result = wait_for_task(maintenance_mode_task)
+            if changed:
+                task = self.host.Destroy_Task()
+                changed, result = wait_for_task(task)
+            else:
+                raise Exception(result)
+        self.module.exit_json(changed=changed, result=str(result))
+
+    def state_update_host(self):
+        self.module.exit_json(changed=False, msg="Currently not implemented.")
+
+    def state_add_host(self):
+        changed = True
+        result = None
+
+        if not self.module.check_mode:
+            changed, result = self.add_host_to_vcenter()
+        self.module.exit_json(changed=changed, result=str(result))
+
+    def check_host_state(self):
+        self.host, self.cluster = self.find_host_by_cluster_datacenter()
+
+        if self.host is None:
+            return 'absent'
+        else:
+            return 'present'
 
 
 def main():
@@ -199,27 +215,8 @@ def main():
     if not HAS_PYVMOMI:
         module.fail_json(msg='pyvmomi is required for this module')
 
-    try:
-        # Currently state_update_dvs is not implemented.
-        host_states = {
-            'absent': {
-                'present': state_remove_host,
-                'absent': state_exit_unchanged,
-            },
-            'present': {
-                'present': state_exit_unchanged,
-                'absent': state_add_host,
-            }
-        }
-
-        host_states[module.params['state']][check_host_state(module)](module)
-
-    except vmodl.RuntimeFault as runtime_fault:
-        module.fail_json(msg=runtime_fault.msg)
-    except vmodl.MethodFault as method_fault:
-        module.fail_json(msg=method_fault.msg)
-    except Exception as e:
-        module.fail_json(msg=str(e))
+    vmware_host = VMwareHost(module)
+    vmware_host.process_state()
 
 from ansible.module_utils.vmware import *
 from ansible.module_utils.basic import *
```
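
As an aside, the nested dictionary in `process_state` is just a small state machine: the desired state from the playbook and the current state discovered in vCenter select the handler to run. A minimal standalone sketch of that dispatch pattern (plain functions here, not the module's actual handlers):

```python
# Illustration of the dispatch pattern used by process_state(), not module code:
# desired state (from the playbook) x current state (from vCenter) -> handler.
def exit_unchanged():
    return 'no-op'


def add_host():
    return 'added'


def remove_host():
    return 'removed'


host_states = {
    'absent':  {'present': remove_host,    'absent': exit_unchanged},
    'present': {'present': exit_unchanged, 'absent': add_host},
}

desired, current = 'present', 'absent'
print(host_states[desired][current]())  # -> 'added'
```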
