Merge pull request #192 from dw/dmw

Enable initial regression tests in Travis
dw committed 7 years ago via GitHub
commit 22698715a8

@@ -1,5 +1,5 @@
#!/bin/bash -ex
# Run tests/ansible/integration/all.yml under Ansible and Ansible-Mitogen
# Run tests/ansible/all.yml under Ansible and Ansible-Mitogen
TRAVIS_BUILD_DIR="${TRAVIS_BUILD_DIR:-`pwd`}"
TMPDIR="/tmp/ansible-tests-$$"
@@ -58,7 +58,7 @@ echo travis_fold:end:job_setup
echo travis_fold:start:mitogen_linear
/usr/bin/time ./mitogen_ansible_playbook.sh \
integration/all.yml \
all.yml \
-vvv \
-i "${TMPDIR}/hosts"
echo travis_fold:end:mitogen_linear
@@ -66,7 +66,7 @@ echo travis_fold:end:mitogen_linear
echo travis_fold:start:vanilla_ansible
/usr/bin/time ./run_ansible_playbook.sh \
integration/all.yml \
all.yml \
-vvv \
-i "${TMPDIR}/hosts"
echo travis_fold:end:vanilla_ansible

@@ -483,14 +483,29 @@ class Stream(mitogen.core.Stream):
)
)
def on_disconnect(self, broker):
_reaped = False
def _reap_child(self):
"""
Reap the child process during disconnection.
"""
if self._reaped:
# on_disconnect() may be invoked more than once, for example, if
# there is still a pending message to be sent after the first
# on_disconnect() call.
return
pid, status = os.waitpid(self.pid, os.WNOHANG)
if pid:
LOG.debug('%r: child process exit status was %d', self, status)
else:
LOG.debug('%r: child process still alive, sending SIGTERM', self)
os.kill(self.pid, signal.SIGTERM)
pid, status = os.waitpid(self.pid, 0)
os.waitpid(self.pid, 0)
self._reaped = True
def on_disconnect(self, broker):
self._reap_child()
super(Stream, self).on_disconnect(broker)
# Minimised, gzipped, base64'd and passed to 'python -c'. It forks, dups
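
In isolation, the reaping pattern this hunk introduces (a non-blocking waitpid probe, escalation to SIGTERM, then a blocking wait) looks roughly like the following. This is a minimal sketch rather than Mitogen's actual code, and child_pid is a stand-in name:

import os
import signal

def reap_child(child_pid):
    # Non-blocking probe: waitpid() returns (0, 0) while the child still runs.
    pid, status = os.waitpid(child_pid, os.WNOHANG)
    if pid:
        return status
    # Child still alive: request termination, then collect its exit status.
    os.kill(child_pid, signal.SIGTERM)
    _, status = os.waitpid(child_pid, 0)
    return status

As the _reaped flag in the diff suggests, a second call for the same PID must be avoided: waitpid() on an already-reaped child raises OSError (ECHILD).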

@@ -1,11 +1,9 @@
# Verify the behaviour of _low_level_execute_command().
- hosts: all
- name: integration/action__low_level_execute_command.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/action__low_level_execute_command.yml
assert:
that: true
# "echo -en" to test we actually hit bash shell too.
- name: Run raw module without sudo

@@ -1,11 +1,9 @@
- hosts: all
- name: integration/action/make_tmp_path.yml
hosts: all
any_errors_fatal: true
gather_facts: true
tasks:
- name: integration/action/make_tmp_path.yml
assert:
that: true
- action_passthrough:
method: _make_tmp_path

@@ -1,10 +1,8 @@
- hosts: all
- name: integration/action/remote_file_exists.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/action/remote_file_exists.yml
assert:
that: true
- file:
path: /tmp/does-not-exist

@@ -1,9 +1,10 @@
- hosts: all
- name: integration/action/transfer_data.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/action/transfer_data.yml
file:
- file:
path: /tmp/transfer-data
state: absent

@@ -1,9 +1,7 @@
- hosts: all
- name: integration/become/sudo_flags_failure.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/become/sudo_flags_failure.yml
assert:
that: true
- name: Verify behaviour for bad sudo flags.
shell: whoami

@@ -1,9 +1,7 @@
- hosts: all
- name: integration/become/sudo_nonexistent.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/become/sudo_nonexistent.yml
assert:
that: true
- name: Verify behaviour for non-existent accounts.
shell: whoami

@@ -1,11 +1,9 @@
# Verify passwordless sudo behaviour in various cases.
- hosts: all
- name: integration/become/sudo_basic.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/become/sudo_basic.yml
assert:
that: true
- name: Verify we aren't root
shell: whoami

@@ -1,11 +1,9 @@
# Verify passwordful sudo behaviour
- hosts: all
- name: integration/become/sudo_password.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/become/sudo_password.yml
assert:
that: true
- name: Ensure sudo password absent but required.
shell: whoami

@@ -1,11 +1,9 @@
# Verify requiretty support
- hosts: all
- name: integration/become/sudo_requiretty.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/become/sudo_requiretty.yml
assert:
that: true
- name: Verify we can log in to a non-passworded requiretty account
shell: whoami

@@ -1,10 +1,10 @@
# Ensure 'local' connections are grabbed.
- hosts: all
- name: integration/connection_loader__local_blemished.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/connection_loader__local_blemished.yml
determine_strategy:
- determine_strategy:
- custom_python_detect_environment:
connection: local

@@ -1,10 +1,10 @@
# Ensure paramiko connections aren't grabbed.
- hosts: all
- name: integration/connection_loader__paramiko_unblemished.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/connection_loader__paramiko_unblemished.yml
custom_python_detect_environment:
- custom_python_detect_environment:
connection: paramiko
register: out

@@ -1,10 +1,10 @@
# Ensure 'ssh' connections are grabbed.
- hosts: all
- name: integration/connection_loader__ssh_blemished.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/connection_loader__ssh_blemished.yml
determine_strategy:
- determine_strategy:
- custom_python_detect_environment:
connection: ssh

@@ -1,15 +1,13 @@
# Verify the maximum number of contexts allowed on one machine is enforced.
- hosts: all
- name: integration/context_service/lru_one_target.yml
hosts: all
any_errors_fatal: true
vars:
max_interps: "{{lookup('env', 'MITOGEN_MAX_INTERPRETERS')}}"
ubound: "{{max_interps|int + 1}}"
tasks:
- name: integration/context_service/lru_one_target.yml
assert:
that: true
- name: Reset all connections
mitogen_shutdown_all:

@@ -2,12 +2,10 @@
# Test sudo_flags respects -E.
#
- hosts: all
- name: integration/playbook_semantics/become_flags.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/playbook_semantics/become_flags.yml
assert:
that: true
- name: "without -E"
become: true

@@ -1,4 +1,5 @@
- hosts: all
- name: integration/playbook_semantics/delegate_to.yml
hosts: all
any_errors_fatal: true
tasks:
#

@@ -1,6 +1,7 @@
# Ensure environment: is preserved during call.
- hosts: all
- name: integration/playbook_semantics/environment.yml
hosts: all
any_errors_fatal: true
tasks:
- shell: echo $SOME_ENV

@@ -1,12 +1,10 @@
# Verify with_items that modifies the execution environment still executes in
# the correct context.
- hosts: all
- name: integration/playbook_semantics/with_items.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/playbook_semantics/with_items.yml
assert:
that: true
- name: Spin up a few interpreters
shell: whoami

@@ -1,7 +1,7 @@
# Verify 'async: <timeout>' functions as desired.
- hosts: all
- name: integration/runner/async_job_timeout.yml
hosts: all
any_errors_fatal: true
tasks:

@@ -1,7 +1,8 @@
# Verify behaviour of a single asynchronous task, and presence of all output
# fields.
- hosts: all
- name: integration/runner/async_one_job.yml
hosts: all
any_errors_fatal: true
tasks:

@@ -1,4 +1,6 @@
- hosts: all
- name: integration/runner/async_two_simultaneous_jobs.yml
hosts: all
any_errors_fatal: true
tasks:

@@ -1,9 +1,10 @@
- hosts: all
- name: integration/runner__builtin_command_module.yml
hosts: all
any_errors_fatal: true
gather_facts: true
tasks:
- name: integration/runner__builtin_command_module.yml
command: hostname
- command: hostname
with_sequence: start=1 end={{end|default(1)}}
register: out

@@ -1,8 +1,9 @@
- hosts: all
- name: integration/runner__custom_bash_old_style_module.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/runner__custom_bash_old_style_module.yml
custom_bash_old_style_module:
- custom_bash_old_style_module:
foo: true
with_sequence: start=1 end={{end|default(1)}}
register: out

@@ -1,8 +1,8 @@
- hosts: all
- name: integration/runner__custom_bash_want_json_module.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/runner__custom_bash_want_json_module.yml
custom_bash_want_json_module:
- custom_bash_want_json_module:
foo: true
with_sequence: start=1 end={{end|default(1)}}
register: out

@@ -1,8 +1,8 @@
- hosts: all
- name: integration/runner__custom_binary_producing_json.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/runner__custom_binary_producing_json.yml
custom_binary_producing_json:
- custom_binary_producing_json:
foo: true
with_sequence: start=1 end={{end|default(1)}}
register: out

@@ -1,7 +1,7 @@
- hosts: all
- name: integration/runner__custom_binary_producing_junk.yml
hosts: all
tasks:
- name: integration/runner__custom_binary_producing_junk.yml
custom_binary_producing_junk:
- custom_binary_producing_junk:
foo: true
with_sequence: start=1 end={{end|default(1)}}
ignore_errors: true

@@ -1,7 +1,7 @@
- hosts: all
- name: integration/runner__custom_binary_single_null.yml
hosts: all
tasks:
- name: integration/runner__custom_binary_single_null.yml
custom_binary_single_null:
- custom_binary_single_null:
foo: true
with_sequence: start=1 end={{end|default(1)}}
ignore_errors: true

@@ -1,8 +1,8 @@
- hosts: all
- name: integration/runner__custom_perl_json_args_module.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/runner__custom_perl_json_args_module.yml
custom_perl_json_args_module:
- custom_perl_json_args_module:
foo: true
with_sequence: start=1 end={{end|default(1)}}
register: out

@@ -1,8 +1,8 @@
- hosts: all
- name: integration/runner__custom_perl_want_json_module.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/runner__custom_perl_want_json_module.yml
custom_perl_want_json_module:
- custom_perl_want_json_module:
foo: true
with_sequence: start=1 end={{end|default(1)}}
register: out

@@ -1,8 +1,8 @@
- hosts: all
- name: integration/runner__custom_python_json_args_module.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/runner__custom_python_json_args_module.yml
custom_python_json_args_module:
- custom_python_json_args_module:
foo: true
with_sequence: start=1 end={{end|default(1)}}
register: out

@@ -1,8 +1,8 @@
- hosts: all
- name: integration/runner__custom_python_new_style_module.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/runner__custom_python_new_style_module.yml
custom_python_new_style_module:
- custom_python_new_style_module:
foo: true
with_sequence: start=1 end={{end|default(1)}}
register: out

@@ -1,8 +1,8 @@
- hosts: all
- name: integration/runner__custom_python_want_json_module.yml
hosts: all
any_errors_fatal: true
tasks:
- name: integration/runner__custom_python_want_json_module.yml
custom_python_want_json_module:
- custom_python_want_json_module:
foo: true
with_sequence: start=1 end={{end|default(1)}}
register: out

@@ -1,12 +1,11 @@
- hosts: all
- name: integration/runner/forking_behaviour.yml
hosts: all
any_errors_fatal: true
tasks:
# Verify non-async jobs run in-process.
- debug: msg={{is_mitogen}}
- name: get process ID.
custom_python_detect_environment:
register: sync_proc1

@@ -3,12 +3,12 @@
# when generating the temporary paths used by the runner.py code executing
# remotely.
#
- hosts: all
- name: integration/runner__remote_tmp.yml
hosts: all
any_errors_fatal: true
gather_facts: true
tasks:
- name: integration/runner__remote_tmp.yml
bash_return_paths:
- bash_return_paths:
register: output
- assert:

@@ -15,6 +15,11 @@ from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=None):
if not type(self._connection).__module__.startswith('ansible_mitogen'):
return {
'changed': False
}
self._connection._connect()
return {
'changed': True,

@@ -13,6 +13,10 @@ import sys
def main():
module = AnsibleModule(argument_spec={})
module.exit_json(
argv=sys.argv,
env=dict(os.environ),
cwd=os.getcwd(),
python_path=sys.path,
pid=os.getpid(),
ppid=os.getppid(),
uid=os.getuid(),

@@ -0,0 +1,25 @@
#!/usr/bin/python
# I am an Ansible new-style Python module. I leak state from each invocation
# into a class variable and a global variable.
from ansible.module_utils.basic import AnsibleModule
leak1 = []
class MyClass:
leak2 = []
def main():
module = AnsibleModule(argument_spec={'name': {'type': 'str'}})
leak1.append(module.params['name'])
MyClass.leak2.append(module.params['name'])
module.exit_json(
leak1=leak1,
leak2=MyClass.leak2,
)
if __name__ == '__main__':
main()
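
The point of this fixture: under Mitogen, module code may execute repeatedly inside one persistent interpreter, so module-level and class-level globals survive between invocations unless the runner clears or reinitialises them. A minimal sketch of the failure mode (illustrative only, not Mitogen's runner):

# Re-running a module's code in the same namespace accumulates state;
# a fresh namespace per invocation does not.
code = compile(
    "leak = globals().setdefault('leak', []); leak.append(1)", '<mod>', 'exec')

persistent = {}
exec(code, persistent)
exec(code, persistent)
print(persistent['leak'])   # [1, 1]: state leaked across invocations

fresh = {}
exec(code, fresh)
print(fresh['leak'])        # [1]: a clean namespace per run avoids the leak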

@@ -1,11 +1,9 @@
- import_playbook: issue_109.yml
- import_playbook: issue_113.yml
- import_playbook: issue_118.yml
- import_playbook: issue_122.yml
- import_playbook: issue_131.yml
- import_playbook: issue_140.yml
- import_playbook: issue_152.yml
- import_playbook: issue_152b.yml
- import_playbook: issue_154.yml
- import_playbook: issue_174.yml
- import_playbook: issue_177.yml
- import_playbook: issue_109__target_has_old_ansible_installed.yml
- import_playbook: issue_113__duplicate_module_imports.yml
- import_playbook: issue_118__script_not_marked_exec.yml
- import_playbook: issue_122__environment_difference.yml
- import_playbook: issue_140__thread_pileup.yml
- import_playbook: issue_152__local_action_wrong_interpreter.yml
- import_playbook: issue_152__virtualenv_python_fails.yml
- import_playbook: issue_154__module_state_leaks.yml
- import_playbook: issue_177__copy_module_failing.yml

@@ -0,0 +1,3 @@
# I am a dummy ansible.py that, prior to the fix for issue #109, caused
# 'import ansible' to fail to trigger the Mitogen module loader. I don't need
# to contain anything, I just need to exist on PYTHONPATH.

@@ -1,5 +0,0 @@
# Reproduction for issue #109.
- hosts: all
roles:
- issue_109

@@ -0,0 +1,29 @@
# issue #109: ensure that any 'ansible' module or package on the remote machine
# does not conflict with operation.
- name: regression/issue_109__target_has_old_ansible_installed.yml
hosts: all
any_errors_fatal: true
gather_facts: true
tasks:
# Copy the naughty 'ansible' into place.
- copy:
dest: "{{ansible_user_dir}}/ansible.py"
src: ansible.py
# Restart the connection.
- mitogen_shutdown_all:
- custom_python_detect_environment:
register: env
# Verify interpreter config would actually trigger the bug.
- assert:
that:
- env.cwd == ansible_user_dir
- (not env.mitogen_loaded) or (env.python_path.count("") == 1)
# Run some new-style modules that 'from ansible.module_utils...'
- stat:
path: /

@@ -1,17 +0,0 @@
- hosts: all
tasks:
- name: Get auth token
uri:
url: "https://httpbin.org/post"
method: POST
body: "client_id=admin-cli&username=admin&\
password=keycloak_admin_password&grant_type=password"
return_content: true
validate_certs: false
register: r_token
no_log: false
run_once: true
- assert:
that: r_token.status == 200

@@ -0,0 +1,24 @@
# issue #113: ensure CookieJar duplicate import issue does not reappear simply
# by exercising the uri module.
- name: regression/issue_113__duplicate_module_imports.yml
any_errors_fatal: true
hosts: all
tasks:
- name: Get auth token
uri:
url: "http://127.0.0.1:14321/post"
method: POST
body: "client_id=admin-cli&username=admin&\
password=keycloak_admin_password&grant_type=password"
return_content: true
validate_certs: false
register: out
ignore_errors: true
- assert:
that:
- out.status == -1
- out.url == 'http://127.0.0.1:14321/post'
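
For background, the class of bug this guards against: when the same source is imported twice under different module names, Python creates two distinct class objects, so identity and isinstance() checks quietly fail. A minimal sketch of the general shape (file and module names here are hypothetical, not the exact cookielib path behind #113):

import importlib.util
import os
import tempfile

# Write a tiny module containing one class.
path = os.path.join(tempfile.mkdtemp(), 'jarmod.py')
with open(path, 'w') as f:
    f.write('class CookieJar:\n    pass\n')

def load_as(name):
    # Load the same source file under the given module name.
    spec = importlib.util.spec_from_file_location(name, path)
    mod = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(mod)
    return mod

a = load_as('jarmod')
b = load_as('jarmod_duplicate')   # second import of identical source

print(a.CookieJar is b.CookieJar)              # False: two distinct classes
print(isinstance(a.CookieJar(), b.CookieJar))  # False: isinstance() breaks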

@@ -1,9 +1,8 @@
# issue #118 repro: chmod +x not happening during script upload
#
- name: saytrue
- name: regression/issue_118__script_not_marked_exec.yml
hosts: all
become: True
tasks:
- name: run script saytrue
script: scripts/issue_118_saytrue
- script: scripts/issue_118_saytrue

@@ -1,5 +0,0 @@
- hosts: all
tasks:
- script: scripts/print_env.sh
register: env
- debug: msg={{env}}

@@ -0,0 +1,14 @@
# issue #122: still-undiagnosed environment difference between vanilla and
# Mitogen Ansible.
#
# TODO: finish me.
#
- name: regression/issue_122__environment_difference.yml
hosts: all
tasks:
- script: scripts/print_env.sh
register: env
- debug: msg={{env}}

@@ -1,56 +0,0 @@
# Hopeful reproduction for issue #131.
# Run lots of steps (rather than just one) so WorkerProcess and suchlike
# machinery is constantly recreated.
- hosts: all
tasks:
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"
- shell: "true"

@@ -1,24 +0,0 @@
# Reproduction for issue #140.
- hosts: all
tasks:
- name: Create file tree
connection: local
shell: >
mkdir filetree;
for i in `seq 1 1000` ; do echo $i > filetree/$i ; done
args:
creates: filetree
- name: Delete remote file tree
shell: rm -rf /tmp/filetree
- name: Trigger nasty process pileup
synchronize:
src: "{{ item.src }}"
dest: "/tmp/filetree"
with_filetree:
- filetree
when: item.state == 'file'

@@ -0,0 +1,29 @@
# issue #140: with_filetree use caused unbounded (>500) thread growth in target
# interpreter. There is no easy hook to check the thread count, but a
# 1000-item loop should crash for other reasons (RAM, file descriptor count,
# ...).
- name: regression/issue_140__thread_pileup.yml
hosts: all
any_errors_fatal: true
tasks:
- name: Create file tree
connection: local
shell: >
mkdir filetree;
for i in `seq 1 1000` ; do echo $i > filetree/$i ; done
args:
creates: filetree
- name: Delete remote file tree
shell: rm -rf /tmp/filetree
- name: Trigger nasty process pileup
synchronize:
src: "{{ item.src }}"
dest: "/tmp/filetree"
with_filetree:
- filetree
when: item.state == 'file'

@@ -1,19 +1,31 @@
# issue #152 (b): local connections were not receiving
# ansible_python_interpreter treatment, breaking virtualenvs.
# ansible_python_interpreter treatment, breaking virtualenvs. Verify this is
# fixed by writing out a wrapper script that sets an environment variable we
# can test for.
# To test:
# - Ensure system Python does not have boto3 installed.
# - Run /path/to/venv/ansible-playbook ... with the virtualenv deactivated.
# Observe success.
# - Run ansible-playbook ... with the virtualenv activated. Observe success.
- hosts: all
- name: regression/issue_152__local_action_wrong_interpreter.yml
hosts: all
connection: local
any_errors_fatal: true
tasks:
- name: regression/issue_152__local_action_wrong_interpreter.yml
connection: local
become: true
shell: pip uninstall boto3
ignore_errors: true
- cloudformation_facts:
connection: local
- copy:
dest: /tmp/issue_152_interpreter.sh
mode: u+x
content: |
#!/bin/bash
export CUSTOM_INTERPRETER=1
exec python2.7 "$@"
- custom_python_detect_environment:
vars:
ansible_python_interpreter: /tmp/issue_152_interpreter.sh
register: out
- assert:
that:
- out.env.CUSTOM_INTERPRETER == "1"
- file:
path: /tmp/issue_152_interpreter.sh
state: absent
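
The trick in this test is the wrapper script: it exports a marker variable and execs the real interpreter, so a module that later observes the marker proves ansible_python_interpreter was honoured. The same idea driven directly from Python, as a sketch (paths are hypothetical, and python3 stands in for the python2.7 of the era):

import os
import stat
import subprocess
import tempfile

# Write a wrapper that marks the environment, then execs the real interpreter.
wrapper = os.path.join(tempfile.mkdtemp(), 'wrapped-python.sh')
with open(wrapper, 'w') as f:
    f.write('#!/bin/bash\nexport CUSTOM_INTERPRETER=1\nexec python3 "$@"\n')
os.chmod(wrapper, os.stat(wrapper).st_mode | stat.S_IXUSR)

# Anything run through the wrapper sees the marker variable.
out = subprocess.check_output(
    [wrapper, '-c', "import os; print(os.environ['CUSTOM_INTERPRETER'])"])
print(out.strip())   # b'1'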

@@ -1,19 +1,21 @@
- hosts: all
- name: regression/issue_152__virtualenv_python_fails.yml
any_errors_fatal: true
hosts: all
tasks:
- name: Make virtualenv
pip:
virtualenv: /tmp/issue_151_virtualenv
name: psycopg2
- name: Use virtualenv for the Python interpreter
set_fact: ansible_python_interpreter=/tmp/issue_151_virtualenv/bin/python
# Can't use pip module because you can't fricking just create a virtualenv,
# must call it directly.
- shell: virtualenv /tmp/issue_152_virtualenv
- name: Ensure the app DB user exists
postgresql_user:
db: postgres
login_host: localhost
login_user: "{{ansible_user_id}}"
login_password: "x"
name: fakeuser
state: present
- custom_python_detect_environment:
vars:
ansible_python_interpreter: /tmp/issue_152_virtualenv/bin/python
register: out
- assert:
that:
- out.sys_executable == "/tmp/issue_152_virtualenv/bin/python"
- file:
path: /tmp/issue_152_virtualenv
state: absent

@@ -1,22 +0,0 @@
- hosts: all
become: true
vars:
repo_baseurl: "http://myurl.com"
default_repos:
- repo: demo-repo1
description: Base software packages
url: "{{repo_baseurl}}/repo1"
- repo: demo-repo2
description: Misc packages
url: "{{repo_baseurl}}/repo2"
tasks:
- name: Create multiple yum repos
yum_repository:
name: '{{item.repo}}'
http_caching: packages
gpgcheck: no
description: '{{item.description}}'
state: present
baseurl: '{{item.url}}'
enabled: yes
with_items: '{{ default_repos }}'

@@ -0,0 +1,18 @@
# issue #154: yum_repository module leaks state via a class variable, so it
# must be reinitialized or cleared out somehow on each invocation.
- name: regression/issue_154__module_state_leaks.yml
any_errors_fatal: true
hosts: all
tasks:
- custom_python_leaky_class_vars:
name: David
with_sequence: start=0 end=3
register: out
- assert:
that:
- out.results[item|int].leak1 == ["David"]
- out.results[item|int].leak2 == ["David"]
with_sequence: start=0 end=3

@@ -1,5 +0,0 @@
- hosts: all
tasks:
- name: add nginx ppa
become: yes
apt_repository: repo='ppa:nginx/stable' update_cache=yes

@@ -1,7 +0,0 @@
- hosts: all
tasks:
- name: copy repo configs
copy: src=/etc/{{ item }} dest=/tmp/{{item}} mode=0644
with_items:
- passwd
- hosts

@@ -0,0 +1,19 @@
- name: regression/issue_177__copy_module_failing.yml
any_errors_fatal: true
hosts: all
tasks:
- copy:
src: /etc/{{item}}
dest: /tmp/{{item}}
mode: 0644
with_items:
- passwd
- hosts
- file:
path: /tmp/{{item}}
state: absent
with_items:
- passwd
- hosts

@@ -1,9 +0,0 @@
---
- name: Decrypt SSH-Keys
include_role:
name: issue_109_add_ssh_key
- name: Gather Facts
include_role:
name: issue_109_gather_facts

@@ -1,2 +1,2 @@
#!/bin/bash
/bin/true
command true

@@ -16,15 +16,16 @@ FROM debian:stable
RUN apt-get update
RUN \
apt-get install -y python2.7 openssh-server sudo rsync git strace \
libjson-perl && \
apt-get clean
libjson-perl python-virtualenv && \
apt-get clean && \
rm -rf /var/cache/apt
"""
CENTOS_DOCKERFILE = r"""
FROM centos:7
RUN yum clean all && \
yum -y install -y python2.7 openssh-server sudo rsync git strace sudo \
perl-JSON && \
perl-JSON python-virtualenv && \
yum clean all && \
groupadd sudo && \
ssh-keygen -t rsa -f /etc/ssh/ssh_host_rsa_key
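
These inline Dockerfiles are presumably handed to docker build by the test harness. A hypothetical helper sketching how such a string can be materialised and built with the Docker CLI (build_image and the tag are made up, not the harness's real entry point):

import os
import subprocess
import tempfile

def build_image(dockerfile_text, tag):
    # Write the inline Dockerfile into an empty build context, then build it.
    ctx = tempfile.mkdtemp()
    with open(os.path.join(ctx, 'Dockerfile'), 'w') as f:
        f.write(dockerfile_text)
    subprocess.check_call(['docker', 'build', '-t', tag, ctx])

# e.g. build_image(CENTOS_DOCKERFILE, 'mitogen-test-centos')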
