@@ -3,6 +3,7 @@
# Copyright 2015 Cristian van Ee <cristian at cvee.org>
# Copyright 2015 Igor Gnatenko <i.gnatenko.brain@gmail.com>
# Copyright 2018 Adam Miller <admiller@redhat.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -88,6 +89,95 @@ options:
    type: bool
    default: false
    version_added: "2.4"
  exclude:
    description:
      - Package name(s) to exclude when state=present or state=latest. This can be a
        list or a comma-separated string.
    version_added: "2.7"
  skip_broken:
    description:
      - Skip packages with broken dependencies (depsolve) that are causing problems.
    type: bool
    default: "no"
    version_added: "2.7"
  update_cache:
    description:
      - Force dnf to check if the cache is out of date and redownload it if needed.
        Has an effect only if state is I(present) or I(latest).
    type: bool
    default: "no"
    aliases: [ expire-cache ]
    version_added: "2.7"
  update_only:
    description:
      - When using latest, only update installed packages. Do not install new packages.
      - Has an effect only if state is I(latest).
    required: false
    default: "no"
    type: bool
    version_added: "2.7"
  security:
    description:
      - If set to C(yes), and C(state=latest), only install updates that have been marked security related.
    type: bool
    default: "no"
    version_added: "2.7"
  bugfix:
    description:
      - If set to C(yes), and C(state=latest), only install updates that have been marked bugfix related.
    required: false
    default: "no"
    type: bool
    version_added: "2.7"
  enable_plugin:
    description:
      - I(Plugin) name to enable for the install/update operation.
        The enabled plugin will not persist beyond the transaction.
    required: false
    version_added: "2.7"
  disable_plugin:
    description:
      - I(Plugin) name to disable for the install/update operation.
        The disabled plugins will not persist beyond the transaction.
    required: false
    version_added: "2.7"
  disable_excludes:
    description:
      - Disable the excludes defined in DNF config files.
      - If set to C(all), disables all excludes.
      - If set to C(main), disable excludes defined in the [main] section of yum.conf.
      - If set to C(repoid), disable excludes defined for the given repo id.
    required: false
    choices: [ all, main, repoid ]
    version_added: "2.7"
  validate_certs:
    description:
      - This only applies if using an https url as the source of the rpm, e.g. for localinstall. If set to C(no), the SSL certificates will not be validated.
      - This should only be set to C(no) on personally controlled sites using self-signed certificates, as it avoids verifying the source site.
    type: bool
    default: "yes"
    version_added: "2.7"
  allow_downgrade:
    description:
      - This is effectively a no-op in DNF as it is the default behavior of dnf, but is an accepted parameter for feature
        parity/compatibility with the I(yum) module.
    type: bool
    default: False
    version_added: "2.7"
  install_repoquery:
    description:
      - This is effectively a no-op in DNF as it is not needed with DNF, but is an accepted parameter for feature
        parity/compatibility with the I(yum) module.
    type: bool
    default: True
    version_added: "2.7"
  download_only:
    description:
      - Only download the packages, do not install them.
    required: false
    default: "no"
    type: bool
    version_added: "2.7"
notes:
  - When used with a `loop:` each package will be processed individually; it is much more efficient to pass the list directly to the `name` option.
requirements:
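
A short illustrative sketch of how several of the new options above combine in ordinary tasks; the package names, exclude pattern, and values below are placeholders rather than anything taken from this module:

    - name: Apply only security updates, skipping packages with broken dependencies
      dnf:
        name: '*'
        state: latest
        security: yes
        skip_broken: yes

    - name: Install a list passed directly to name (more efficient than loop), excluding kernel packages
      dnf:
        name:
          - httpd
          - httpd-tools
        state: present
        exclude: kernel*
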
@@ -98,6 +188,7 @@ author:
  - '"Igor Gnatenko (@ignatenkobrain)" <i.gnatenko.brain@gmail.com>'
  - '"Cristian van Ee (@DJMuggs)" <cristian at cvee.org>'
  - "Berend De Schouwer (github.com/berenddeschouwer)"
  - '"Adam Miller (@maxamillion)" <admiller@redhat.com>'
'''

EXAMPLES = '''
@@ -147,7 +238,9 @@ EXAMPLES = '''
    state: absent
    autoremove: no
'''

import os
import tempfile

try:
    import dnf
@@ -160,378 +253,476 @@ try:
except ImportError:
    HAS_DNF = False

from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.urls import fetch_url
from ansible.module_utils.six import PY2
from distutils.version import LooseVersion

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.yumdnf import YumDnf, yumdnf_argument_spec

# 64k. Number of bytes to read at a time when manually downloading pkgs via a url
BUFSIZE = 65536


class DnfModule(YumDnf):
    """
    DNF Ansible module back-end implementation
    """
    def __init__(self, module):
        # This populates instance vars for all argument spec params
        super(DnfModule, self).__init__(module)

        self._ensure_dnf()

    def fetch_rpm_from_url(self, spec):
        # FIXME: Remove this once this PR is merged:
        #   https://github.com/ansible/ansible/pull/19172

        # download package so that we can query it
        package_name, dummy = os.path.splitext(str(spec.rsplit('/', 1)[1]))
        package_file = tempfile.NamedTemporaryFile(dir=self.module.tmpdir, prefix=package_name, suffix='.rpm', delete=False)
        self.module.add_cleanup_file(package_file.name)
        try:
            rsp, info = fetch_url(self.module, spec)
            if not rsp:
                self.module.fail_json(msg="Failure downloading %s, %s" % (spec, info['msg']))
            data = rsp.read(BUFSIZE)
            while data:
                package_file.write(data)
                data = rsp.read(BUFSIZE)
            package_file.close()
        except Exception as e:
            self.module.fail_json(msg="Failure downloading %s, %s" % (spec, to_native(e)))

        return package_file.name
    def _ensure_dnf(self):
        if not HAS_DNF:
            if PY2:
                package = 'python2-dnf'
            else:
                package = 'python3-dnf'

            if self.module.check_mode:
                self.module.fail_json(
                    msg="`{0}` is not installed, but it is required"
                    " for the Ansible dnf module.".format(package)
                )

            self.module.run_command(['dnf', 'install', '-y', package], check_rc=True)
            global dnf
            try:
                import dnf
                import dnf.cli
                import dnf.const
                import dnf.exceptions
                import dnf.subject
                import dnf.util
            except ImportError:
                self.module.fail_json(
                    msg="Could not import the dnf python module."
                    " Please install `{0}` package.".format(package)
                )
    def _configure_base(self, base, conf_file, disable_gpg_check, installroot='/'):
        """Configure the dnf Base object."""

        if self.enable_plugin and self.disable_plugin:
            base.init_plugins(self.disable_plugin, self.enable_plugin)
        elif self.enable_plugin:
            base.init_plugins(enable_plugins=self.enable_plugin)
        elif self.disable_plugin:
            base.init_plugins(self.disable_plugin)

        conf = base.conf

        # Turn off debug messages in the output
        conf.debuglevel = 0

        # Set whether to check gpg signatures
        conf.gpgcheck = not disable_gpg_check

        # Don't prompt for user confirmations
        conf.assumeyes = True

        # Set installroot
        conf.installroot = installroot

        # Set excludes
        if self.exclude:
            conf.exclude(self.exclude)

        # Set disable_excludes
        if self.disable_excludes:
            conf.disable_excludes = [self.disable_excludes]

        # Set releasever
        if self.releasever is not None:
            conf.substitutions['releasever'] = self.releasever

        # Set skip_broken (in dnf this is strict=0)
        if self.skip_broken:
            conf.strict = 0

        if self.download_only:
            conf.downloadonly = True

        # Change the configuration file path if provided
        if conf_file:
            # Fail if we can't read the configuration file.
            if not os.access(conf_file, os.R_OK):
                self.module.fail_json(
                    msg="cannot read configuration file", conf_file=conf_file)
            else:
                conf.config_file_path = conf_file

        # Read the configuration file
        conf.read()
    def _specify_repositories(self, base, disablerepo, enablerepo):
        """Enable and disable repositories matching the provided patterns."""
        base.read_all_repos()
        repos = base.repos

        # Disable repositories
        for repo_pattern in disablerepo:
            for repo in repos.get_matching(repo_pattern):
                repo.disable()

        # Enable repositories
        for repo_pattern in enablerepo:
            for repo in repos.get_matching(repo_pattern):
                repo.enable()
    def _base(self, conf_file, disable_gpg_check, disablerepo, enablerepo, installroot):
        """Return a fully configured dnf Base object."""
        base = dnf.Base()
        self._configure_base(base, conf_file, disable_gpg_check, installroot)
        self._specify_repositories(base, disablerepo, enablerepo)
        base.fill_sack(load_system_repo='auto')
        if self.bugfix:
            key = {'advisory_type__eq': 'bugfix'}
            base._update_security_filters = [base.sack.query().filter(**key)]
        if self.security:
            key = {'advisory_type__eq': 'security'}
            base._update_security_filters = [base.sack.query().filter(**key)]
        if self.update_cache:
            base.update_cache()
        return base
    def _package_dict(self, package):
        """Return a dictionary of information for the package."""
        # NOTE: This no longer contains the 'dnfstate' field because it is
        # already known based on the query type.
        result = {
            'name': package.name,
            'arch': package.arch,
            'epoch': str(package.epoch),
            'release': package.release,
            'version': package.version,
            'repo': package.repoid}
        result['nevra'] = '{epoch}:{name}-{version}-{release}.{arch}'.format(
            **result)

        return result
    def list_items(self, command):
        """List package info based on the command."""
        # Rename updates to upgrades
        if command == 'updates':
            command = 'upgrades'

        # Return the corresponding packages
        if command in ['installed', 'upgrades', 'available']:
            results = [
                self._package_dict(package)
                for package in getattr(self.base.sack.query(), command)()]
        # Return the enabled repository ids
        elif command in ['repos', 'repositories']:
            results = [
                {'repoid': repo.id, 'state': 'enabled'}
                for repo in self.base.repos.iter_enabled()]
        # Return any matching packages
        else:
            packages = dnf.subject.Subject(command).get_best_query(self.base.sack)
            results = [self._package_dict(package) for package in packages]

        self.module.exit_json(results=results)
    def _mark_package_install(self, pkg_spec):
        """Mark the package for install."""
        try:
            self.base.install(pkg_spec)
        except dnf.exceptions.MarkingError:
            self.module.fail_json(msg="No package {0} available.".format(pkg_spec))

    def _parse_spec_group_file(self):
        pkg_specs, grp_specs, filenames = [], [], []
        for name in self.names:
            if name.endswith(".rpm"):
                if '://' in name:
                    name = self.fetch_rpm_from_url(name)
                filenames.append(name)
            elif name.startswith("@"):
                grp_specs.append(name[1:])
            else:
                pkg_specs.append(name)
        return pkg_specs, grp_specs, filenames

    def _update_only(self, pkgs):
        installed = self.base.sack.query().installed()
        for pkg in pkgs:
            if installed.filter(name=pkg):
                self.base.package_upgrade(pkg)

    def _install_remote_rpms(self, filenames):
        if int(dnf.__version__.split(".")[0]) >= 2:
            pkgs = list(sorted(self.base.add_remote_rpms(list(filenames)), reverse=True))
        else:
            pkgs = []
            for filename in filenames:
                pkgs.append(self.base.add_remote_rpm(filename))
        if self.update_only:
            self._update_only(pkgs)
        else:
            for pkg in pkgs:
                self.base.package_install(pkg)
    def ensure(self):
        # Accumulate failures. Package management modules install what they can
        # and fail with a message about what they can't.
        failures = []
        allow_erasing = False

        # Autoremove is called alone
        # Jump to remove path where base.autoremove() is run
        if not self.names and self.autoremove:
            self.names = []
            self.state = 'absent'

        if self.names == ['*'] and self.state == 'latest':
            self.base.upgrade_all()
        else:
            pkg_specs, group_specs, filenames = self._parse_spec_group_file()
            if group_specs:
                self.base.read_comps()

            pkg_specs = [p.strip() for p in pkg_specs]
            filenames = [f.strip() for f in filenames]
            groups = []
            environments = []
            for group_spec in (g.strip() for g in group_specs):
                group = self.base.comps.group_by_pattern(group_spec)
                if group:
                    groups.append(group.id)
                else:
                    environment = self.base.comps.environment_by_pattern(group_spec)
                    if environment:
                        environments.append(environment.id)
                    else:
                        self.module.fail_json(
                            msg="No group {0} available.".format(group_spec))

            if self.state in ['installed', 'present']:
                # Install files.
                self._install_remote_rpms(filenames)

                # Install groups.
                for group in groups:
                    try:
                        self.base.group_install(group, dnf.const.GROUP_PACKAGE_TYPES)
                    except dnf.exceptions.Error as e:
                        # In dnf 2.0 if all the mandatory packages in a group do
                        # not install, an error is raised. We want to capture
                        # this but still install as much as possible.
                        failures.append((group, to_native(e)))

                for environment in environments:
                    try:
                        self.base.environment_install(environment, dnf.const.GROUP_PACKAGE_TYPES)
                    except dnf.exceptions.Error as e:
                        failures.append((environment, to_native(e)))

                # Install packages.
                if self.update_only:
                    self._update_only(pkg_specs)
                else:
                    for pkg_spec in pkg_specs:
                        self._mark_package_install(pkg_spec)

            elif self.state == 'latest':
                # "latest" is same as "installed" for filenames.
                self._install_remote_rpms(filenames)

                for group in groups:
                    try:
                        try:
                            self.base.group_upgrade(group)
                        except dnf.exceptions.CompsError:
                            # If not already installed, try to install.
                            self.base.group_install(group, dnf.const.GROUP_PACKAGE_TYPES)
                    except dnf.exceptions.Error as e:
                        failures.append((group, to_native(e)))

                for environment in environments:
                    try:
                        try:
                            self.base.environment_upgrade(environment)
                        except dnf.exceptions.CompsError:
                            # If not already installed, try to install.
                            self.base.environment_install(environment, dnf.const.GROUP_PACKAGE_TYPES)
                    except dnf.exceptions.Error as e:
                        failures.append((environment, to_native(e)))

                if self.update_only:
                    self._update_only(pkg_specs)
                else:
                    for pkg_spec in pkg_specs:
                        # best effort causes the latest package to be installed
                        # even if not previously installed
                        self.base.conf.best = True
                        try:
                            self.base.install(pkg_spec)
                        except dnf.exceptions.MarkingError as e:
                            failures.append((pkg_spec, to_native(e)))

            else:
                # state == absent
                if self.autoremove:
                    self.base.conf.clean_requirements_on_remove = self.autoremove

                if filenames:
                    self.module.fail_json(
                        msg="Cannot remove paths -- please specify package name.")

                for group in groups:
                    try:
                        self.base.group_remove(group)
                    except dnf.exceptions.CompsError:
                        # Group is already uninstalled.
                        pass

                for environment in environments:
                    try:
                        self.base.environment_remove(environment)
                    except dnf.exceptions.CompsError:
                        # Environment is already uninstalled.
                        pass

                installed = self.base.sack.query().installed()
                for pkg_spec in pkg_specs:
                    if installed.filter(name=pkg_spec):
                        self.base.remove(pkg_spec)

                # Like the dnf CLI we want to allow recursive removal of dependent
                # packages
                allow_erasing = True

                if self.autoremove:
                    self.base.autoremove()

        if not self.base.resolve(allow_erasing=allow_erasing):
            if failures:
                self.module.fail_json(
                    msg='Failed to install some of the specified packages',
                    failures=failures
                )
            self.module.exit_json(msg="Nothing to do")
        else:
            if self.module.check_mode:
                if failures:
                    self.module.fail_json(
                        msg='Failed to install some of the specified packages',
                        failures=failures
                    )
                self.module.exit_json(changed=True)

            try:
                self.base.download_packages(self.base.transaction.install_set)
            except dnf.exceptions.DownloadError as e:
                self.module.fail_json(msg="Failed to download packages: {0}".format(to_text(e)))

            response = {'changed': True, 'results': []}
            if self.download_only:
                for package in self.base.transaction.install_set:
                    response['results'].append("Downloaded: {0}".format(package))
                self.module.exit_json(**response)
            else:
                self.base.do_transaction()
                for package in self.base.transaction.install_set:
                    response['results'].append("Installed: {0}".format(package))
                for package in self.base.transaction.remove_set:
                    response['results'].append("Removed: {0}".format(package))

            if failures:
                self.module.fail_json(
                    msg='Failed to install some of the specified packages',
                    failures=failures
                )
            self.module.exit_json(**response)

    @staticmethod
    def has_dnf():
        return HAS_DNF

    def run(self):
        """The main function."""

        # Check if autoremove is called correctly
        if self.autoremove:
            if LooseVersion(dnf.__version__) < LooseVersion('2.0.1'):
                self.module.fail_json(msg="Autoremove requires dnf>=2.0.1. Current dnf version is %s" % dnf.__version__)
            if self.state not in ["absent", None]:
                self.module.fail_json(msg="Autoremove should be used alone or with state=absent")

        # Set state as installed by default
        # This is not set in AnsibleModule() because the following shouldn't happen
        # - dnf: autoremove=yes state=installed
        if self.state is None:
            self.state = 'installed'

        if self.list:
            self.base = self._base(
                self.conf_file, self.disable_gpg_check, self.disablerepo,
                self.enablerepo, self.installroot
            )
            self.list_items(self.list)
        else:
            # Note: base takes a long time to run so we want to check for failure
            # before running it.
            if not dnf.util.am_i_root():
                self.module.fail_json(msg="This command has to be run under the root user.")
            self.base = self._base(
                self.conf_file, self.disable_gpg_check, self.disablerepo,
                self.enablerepo, self.installroot
            )

            self.ensure()
def main():
    # state=installed name=pkgspec
    # state=removed name=pkgspec
    # state=latest name=pkgspec
    #
    # informational commands:
    #   list=installed
    #   list=updates
    #   list=available
    #   list=repos
    #   list=pkgspec

    module = AnsibleModule(
        **yumdnf_argument_spec
    )

    module_implementation = DnfModule(module)
    try:
        module_implementation.run()
    except dnf.exceptions.RepoError as de:
        module.exit_json(msg="Failed to synchronize repodata: {0}".format(de))


if __name__ == '__main__':