From fa99a1785b8142c7573e1a7493cf4adf67a82635 Mon Sep 17 00:00:00 2001 From: Toshio Kuratomi Date: Jan 13 2016 02:33:00 +0000 Subject: Ansible 2.0.0.1 from upstream. Rewrite with many bugfixes, rewritten code, and new features. See the upstream changelog for details: https://github.com/ansible/ansible/blob/devel/CHANGELOG.md --- diff --git a/.gitignore b/.gitignore index 22a4239..98ba0a7 100644 --- a/.gitignore +++ b/.gitignore @@ -26,3 +26,5 @@ /ansible-1.9.2.tar.gz /ansible-1.9.3.tar.gz /ansible-1.9.4.tar.gz +/ansible-2.0.0.1.tar.gz +/ansible-unittests.tar.xz diff --git a/0001-fix-2043-strip-empty-dict-from-end-of-pull-stream.patch b/0001-fix-2043-strip-empty-dict-from-end-of-pull-stream.patch deleted file mode 100644 index 2bf5572..0000000 --- a/0001-fix-2043-strip-empty-dict-from-end-of-pull-stream.patch +++ /dev/null @@ -1,41 +0,0 @@ -From 64b8596250e58a55eee8f2d4323d35ca32a8cd53 Mon Sep 17 00:00:00 2001 -From: Adam Williamson -Date: Tue, 13 Oct 2015 22:33:46 -0700 -Subject: [PATCH] fix #2043: strip empty dict from end of 'pull' stream - -When pulling an image using Docker 1.8, it seems the output -JSON stream has an empty dict at the very end. This causes -ansible to fail when pulling an image, as it's expecting a -status message in that dict which it uses to determine whether -it had to download the image or not. As a bit of an ugly hack -for that which remains backward compatible, try the last item -in the stream, and if it's an empty dict, take the last-but-one -item instead. - -The strip() is needed as the exact value appears to be '{}/r/n'; -we could just match that, but it seems like the kind of thing -where maybe it'd happen to just be '{}/n' or '{}' or something -in some cases, so let's just use strip() in case. ---- - ansible/lib/ansible/modules/cloud/docker/docker.py | 5 +++++ - 1 file changed, 5 insertions(+) - -diff --git ansible.orig/lib/ansible/modules/core/cloud/docker/docker.py ansible/lib/ansible/modules/core/cloud/docker/docker.py -index 4df808f..cc22632 100644 ---- ansible/lib/ansible/modules/core/cloud/docker/docker.py -+++ ansible/lib/ansible/modules/core/cloud/docker/docker.py -@@ -1392,6 +1392,11 @@ class DockerManager(object): - changes = list(self.client.pull(image, tag=tag, stream=True, **extra_params)) - try: - last = changes[-1] -+ # seems Docker 1.8 puts an empty dict at the end of the -+ # stream; catch that and get the previous instead -+ # https://github.com/ansible/ansible-modules-core/issues/2043 -+ if last.strip() == '{}': -+ last = changes[-2] - except IndexError: - last = '{}' - status = json.loads(last).get('status', '') --- -2.5.0 - diff --git a/ansible-1.9.3-dnf.patch b/ansible-1.9.3-dnf.patch deleted file mode 100644 index 632df32..0000000 --- a/ansible-1.9.3-dnf.patch +++ /dev/null @@ -1,11 +0,0 @@ -diff -Nur ansible-1.9.3.orig/lib/ansible/module_utils/facts.py ansible-1.9.3/lib/ansible/module_utils/facts.py ---- ansible-1.9.3.orig/lib/ansible/module_utils/facts.py 2015-09-03 17:03:08.000000000 -0600 -+++ ansible-1.9.3/lib/ansible/module_utils/facts.py 2015-09-03 20:19:33.256457164 -0600 -@@ -115,6 +115,7 @@ - # package manager, put the preferred one last. If there is an - # ansible module, use that as the value for the 'name' key. 
- PKG_MGRS = [ { 'path' : '/usr/bin/yum', 'name' : 'yum' }, -+ { 'path' : '/usr/bin/dnf', 'name' : 'dnf' }, - { 'path' : '/usr/bin/apt-get', 'name' : 'apt' }, - { 'path' : '/usr/bin/zypper', 'name' : 'zypper' }, - { 'path' : '/usr/sbin/urpmi', 'name' : 'urpmi' }, diff --git a/ansible-1.9.3-yum-return-val.patch b/ansible-1.9.3-yum-return-val.patch deleted file mode 100644 index ff0764c..0000000 --- a/ansible-1.9.3-yum-return-val.patch +++ /dev/null @@ -1,21 +0,0 @@ -diff -ur ansible-1.9.3/lib/ansible/modules/core/packaging/os/yum.py ansible-1.9.4/lib/ansible/modules/core/packaging/os/yum.py ---- ansible-1.9.3/lib/ansible/modules/core/packaging/os/yum.py 2015-09-03 16:03:12.000000000 -0700 -+++ ansible-1.9.4/lib/ansible/modules/core/packaging/os/yum.py 2015-09-04 10:03:32.000000000 -0700 -@@ -733,7 +733,7 @@ - rc, out, err = module.run_command(yum_basecmd + ['check-update']) - if rc == 0 and update_all: - res['results'].append('Nothing to do here, all packages are up to date') -- return res -+ module.exit_json(**res) - elif rc == 100: - available_updates = out.split('\n') - # build update dictionary -@@ -813,7 +813,7 @@ - if len(will_update) > 0 or len(pkgs['install']) > 0: - res['changed'] = True - -- return res -+ module.exit_json(**res) - - # run commands - if cmd: # update all diff --git a/ansible-dnf-backport.patch b/ansible-dnf-backport.patch deleted file mode 100644 index 676e02c..0000000 --- a/ansible-dnf-backport.patch +++ /dev/null @@ -1,974 +0,0 @@ -diff -Nur ansible-1.9.3.orig/lib/ansible/modules/extras/packaging/os/dnf.py ansible-1.9.3/lib/ansible/modules/extras/packaging/os/dnf.py ---- ansible-1.9.3.orig/lib/ansible/modules/extras/packaging/os/dnf.py 2015-09-03 17:03:13.000000000 -0600 -+++ ansible-1.9.3/lib/ansible/modules/extras/packaging/os/dnf.py 2015-10-04 11:16:05.835216353 -0600 -@@ -1,7 +1,8 @@ --#!/usr/bin/python -tt -+#!/usr/bin/python - # -*- coding: utf-8 -*- - --# Written by Cristian van Ee -+# Copyright 2015 Cristian van Ee -+# Copyright 2015 Igor Gnatenko - # - # This file is part of Ansible - # -@@ -19,18 +20,6 @@ - # along with Ansible. If not, see . - # - -- --import traceback --import os --import dnf -- --try: -- from dnf import find_unfinished_transactions, find_ts_remaining -- from rpmUtils.miscutils import splitFilename -- transaction_helpers = True --except: -- transaction_helpers = False -- - DOCUMENTATION = ''' - --- - module: dnf -@@ -45,17 +34,20 @@ - required: true - default: null - aliases: [] -+ - list: - description: - - Various (non-idempotent) commands for usage with C(/usr/bin/ansible) and I(not) playbooks. See examples. - required: false - default: null -+ - state: - description: - - Whether to install (C(present), C(latest)), or remove (C(absent)) a package. - required: false - choices: [ "present", "latest", "absent" ] - default: "present" -+ - enablerepo: - description: - - I(Repoid) of repositories to enable for the install/update operation. 
-@@ -93,9 +85,11 @@ - notes: [] - # informational: requirements for nodes - requirements: -+ - "python >= 2.6" - - dnf -- - yum-utils (for repoquery) --author: '"Cristian van Ee (@DJMuggs)" ' -+author: -+ - '"Igor Gnatenko (@ignatenkobrain)" ' -+ - '"Cristian van Ee (@DJMuggs)" ' - ''' - - EXAMPLES = ''' -@@ -121,710 +115,231 @@ - dnf: name="@Development tools" state=present - - ''' -+import os - --def_qf = "%{name}-%{version}-%{release}.%{arch}" -- --repoquery='/usr/bin/repoquery' --if not os.path.exists(repoquery): -- repoquery = None -- --dnfbin='/usr/bin/dnf' -- --import syslog -- --def log(msg): -- syslog.openlog('ansible-dnf', 0, syslog.LOG_USER) -- syslog.syslog(syslog.LOG_NOTICE, msg) -- --def dnf_base(conf_file=None, cachedir=False): -- -- my = dnf.Base() -- my.conf.debuglevel=0 -- if conf_file and os.path.exists(conf_file): -- my.conf.config_file_path = conf_file -- my.conf.read() -- my.read_all_repos() -- my.fill_sack() -- -- return my -- --def install_dnf_utils(module): -- -- if not module.check_mode: -- dnf_path = module.get_bin_path('dnf') -- if dnf_path: -- rc, so, se = module.run_command('%s -y install yum-utils' % dnf_path) -- if rc == 0: -- this_path = module.get_bin_path('repoquery') -- global repoquery -- repoquery = this_path -- --def po_to_nevra(po): -- -- if hasattr(po, 'ui_nevra'): -- return po.ui_nevra -- else: -- return '%s-%s-%s.%s' % (po.name, po.version, po.release, po.arch) -- --def is_installed(module, repoq, pkgspec, conf_file, qf=def_qf, en_repos=[], dis_repos=[], is_pkg=False): -- -- if not repoq: -- -- pkgs = [] -- try: -- my = dnf_base(conf_file) -- for rid in en_repos: -- my.repos.enableRepo(rid) -- for rid in dis_repos: -- my.repos.disableRepo(rid) -- -- e,m,u = my.rpmdb.matchPackageNames([pkgspec]) -- pkgs = e + m -- if not pkgs: -- pkgs.extend(my.returnInstalledPackagesByDep(pkgspec)) -- except Exception, e: -- module.fail_json(msg="Failure talking to dnf: %s" % e) -- -- return [ po_to_nevra(p) for p in pkgs ] -- -- else: -- -- cmd = repoq + ["--disablerepo=*", "--pkgnarrow=installed", "--qf", qf, pkgspec] -- rc,out,err = module.run_command(cmd) -- if not is_pkg: -- cmd = repoq + ["--disablerepo=*", "--pkgnarrow=installed", "--qf", qf, "--whatprovides", pkgspec] -- rc2,out2,err2 = module.run_command(cmd) -- else: -- rc2,out2,err2 = (0, '', '') -- -- if rc == 0 and rc2 == 0: -- out += out2 -- return [ p for p in out.split('\n') if p.strip() ] -- else: -- module.fail_json(msg='Error from repoquery: %s: %s' % (cmd, err + err2)) -- -- return [] -- --def is_available(module, repoq, pkgspec, conf_file, qf=def_qf, en_repos=[], dis_repos=[]): -- -- if not repoq: -- -- pkgs = [] -- try: -- my = dnf_base(conf_file) -- for rid in en_repos: -- my.repos.enableRepo(rid) -- for rid in dis_repos: -- my.repos.disableRepo(rid) -- -- e,m,u = my.pkgSack.matchPackageNames([pkgspec]) -- pkgs = e + m -- if not pkgs: -- pkgs.extend(my.returnPackagesByDep(pkgspec)) -- except Exception, e: -- module.fail_json(msg="Failure talking to dnf: %s" % e) -- -- return [ po_to_nevra(p) for p in pkgs ] -- -- else: -- myrepoq = list(repoq) -- -- for repoid in dis_repos: -- r_cmd = ['--disablerepo', repoid] -- myrepoq.extend(r_cmd) -- -- for repoid in en_repos: -- r_cmd = ['--enablerepo', repoid] -- myrepoq.extend(r_cmd) -- -- cmd = myrepoq + ["--qf", qf, pkgspec] -- rc,out,err = module.run_command(cmd) -- if rc == 0: -- return [ p for p in out.split('\n') if p.strip() ] -+try: -+ import dnf -+ from dnf import cli, const, exceptions, subject, util -+ HAS_DNF = True -+except ImportError: 
-+ HAS_DNF = False -+ -+ -+def _fail_if_no_dnf(module): -+ """Fail if unable to import dnf.""" -+ if not HAS_DNF: -+ module.fail_json( -+ msg="`python-dnf` is not installed, but it is required for the Ansible dnf module.") -+ -+ -+def _configure_base(module, base, conf_file, disable_gpg_check): -+ """Configure the dnf Base object.""" -+ conf = base.conf -+ -+ # Turn off debug messages in the output -+ conf.debuglevel = 0 -+ -+ # Set whether to check gpg signatures -+ conf.gpgcheck = not disable_gpg_check -+ -+ # Don't prompt for user confirmations -+ conf.assumeyes = True -+ -+ # Change the configuration file path if provided -+ if conf_file: -+ # Fail if we can't read the configuration file. -+ if not os.access(conf_file, os.R_OK): -+ module.fail_json( -+ msg="cannot read configuration file", conf_file=conf_file) - else: -- module.fail_json(msg='Error from repoquery: %s: %s' % (cmd, err)) -+ conf.config_file_path = conf_file - -- -- return [] -+ # Read the configuration file -+ conf.read() - --def is_update(module, repoq, pkgspec, conf_file, qf=def_qf, en_repos=[], dis_repos=[]): -- -- if not repoq: -- -- retpkgs = [] -- pkgs = [] -- updates = [] -- -- try: -- my = dnf_base(conf_file) -- for rid in en_repos: -- my.repos.enableRepo(rid) -- for rid in dis_repos: -- my.repos.disableRepo(rid) -- -- pkgs = my.returnPackagesByDep(pkgspec) + my.returnInstalledPackagesByDep(pkgspec) -- if not pkgs: -- e,m,u = my.pkgSack.matchPackageNames([pkgspec]) -- pkgs = e + m -- updates = my.doPackageLists(pkgnarrow='updates').updates -- except Exception, e: -- module.fail_json(msg="Failure talking to dnf: %s" % e) -- -- for pkg in pkgs: -- if pkg in updates: -- retpkgs.append(pkg) -- -- return set([ po_to_nevra(p) for p in retpkgs ]) - -+def _specify_repositories(base, disablerepo, enablerepo): -+ """Enable and disable repositories matching the provided patterns.""" -+ base.read_all_repos() -+ repos = base.repos -+ -+ # Disable repositories -+ for repo_pattern in disablerepo: -+ for repo in repos.get_matching(repo_pattern): -+ repo.disable() -+ -+ # Enable repositories -+ for repo_pattern in enablerepo: -+ for repo in repos.get_matching(repo_pattern): -+ repo.enable() -+ -+ -+def _base(module, conf_file, disable_gpg_check, disablerepo, enablerepo): -+ """Return a fully configured dnf Base object.""" -+ _fail_if_no_dnf(module) -+ base = dnf.Base() -+ _configure_base(module, base, conf_file, disable_gpg_check) -+ _specify_repositories(base, disablerepo, enablerepo) -+ base.fill_sack() -+ return base -+ -+ -+def _package_dict(package): -+ """Return a dictionary of information for the package.""" -+ # NOTE: This no longer contains the 'dnfstate' field because it is -+ # already known based on the query type. 
-+ result = { -+ 'name': package.name, -+ 'arch': package.arch, -+ 'epoch': str(package.epoch), -+ 'release': package.release, -+ 'version': package.version, -+ 'repo': package.repoid} -+ result['nevra'] = '{epoch}:{name}-{version}-{release}.{arch}'.format( -+ **result) -+ -+ return result -+ -+ -+def list_items(module, base, command): -+ """List package info based on the command.""" -+ # Rename updates to upgrades -+ if command == 'updates': -+ command = 'upgrades' -+ -+ # Return the corresponding packages -+ if command in ['installed', 'upgrades', 'available']: -+ results = [ -+ _package_dict(package) -+ for package in getattr(base.sack.query(), command)()] -+ # Return the enabled repository ids -+ elif command in ['repos', 'repositories']: -+ results = [ -+ {'repoid': repo.id, 'state': 'enabled'} -+ for repo in base.repos.iter_enabled()] -+ # Return any matching packages - else: -- myrepoq = list(repoq) -- for repoid in dis_repos: -- r_cmd = ['--disablerepo', repoid] -- myrepoq.extend(r_cmd) -- -- for repoid in en_repos: -- r_cmd = ['--enablerepo', repoid] -- myrepoq.extend(r_cmd) -- -- cmd = myrepoq + ["--pkgnarrow=updates", "--qf", qf, pkgspec] -- rc,out,err = module.run_command(cmd) -- -- if rc == 0: -- return set([ p for p in out.split('\n') if p.strip() ]) -- else: -- module.fail_json(msg='Error from repoquery: %s: %s' % (cmd, err)) -- -- return [] -- --def what_provides(module, repoq, req_spec, conf_file, qf=def_qf, en_repos=[], dis_repos=[]): -- -- if not repoq: -- -- pkgs = [] -- try: -- my = dnf_base(conf_file) -- for rid in en_repos: -- my.repos.enableRepo(rid) -- for rid in dis_repos: -- my.repos.disableRepo(rid) -- -- pkgs = my.returnPackagesByDep(req_spec) + my.returnInstalledPackagesByDep(req_spec) -- if not pkgs: -- e,m,u = my.pkgSack.matchPackageNames([req_spec]) -- pkgs.extend(e) -- pkgs.extend(m) -- e,m,u = my.rpmdb.matchPackageNames([req_spec]) -- pkgs.extend(e) -- pkgs.extend(m) -- except Exception, e: -- module.fail_json(msg="Failure talking to dnf: %s" % e) -+ packages = subject.Subject(command).get_best_query(base.sack) -+ results = [_package_dict(package) for package in packages] - -- return set([ po_to_nevra(p) for p in pkgs ]) -- -- else: -- myrepoq = list(repoq) -- for repoid in dis_repos: -- r_cmd = ['--disablerepo', repoid] -- myrepoq.extend(r_cmd) -- -- for repoid in en_repos: -- r_cmd = ['--enablerepo', repoid] -- myrepoq.extend(r_cmd) -- -- cmd = myrepoq + ["--qf", qf, "--whatprovides", req_spec] -- rc,out,err = module.run_command(cmd) -- cmd = myrepoq + ["--qf", qf, req_spec] -- rc2,out2,err2 = module.run_command(cmd) -- if rc == 0 and rc2 == 0: -- out += out2 -- pkgs = set([ p for p in out.split('\n') if p.strip() ]) -- if not pkgs: -- pkgs = is_installed(module, repoq, req_spec, conf_file, qf=qf) -- return pkgs -- else: -- module.fail_json(msg='Error from repoquery: %s: %s' % (cmd, err + err2)) -+ module.exit_json(results=results) - -- return [] - --def transaction_exists(pkglist): -- """ -- checks the package list to see if any packages are -- involved in an incomplete transaction -- """ -- -- conflicts = [] -- if not transaction_helpers: -- return conflicts -- -- # first, we create a list of the package 'nvreas' -- # so we can compare the pieces later more easily -- pkglist_nvreas = [] -- for pkg in pkglist: -- pkglist_nvreas.append(splitFilename(pkg)) -- -- # next, we build the list of packages that are -- # contained within an unfinished transaction -- unfinished_transactions = find_unfinished_transactions() -- for trans in unfinished_transactions: 
-- steps = find_ts_remaining(trans) -- for step in steps: -- # the action is install/erase/etc., but we only -- # care about the package spec contained in the step -- (action, step_spec) = step -- (n,v,r,e,a) = splitFilename(step_spec) -- # and see if that spec is in the list of packages -- # requested for installation/updating -- for pkg in pkglist_nvreas: -- # if the name and arch match, we're going to assume -- # this package is part of a pending transaction -- # the label is just for display purposes -- label = "%s-%s" % (n,a) -- if n == pkg[0] and a == pkg[4]: -- if label not in conflicts: -- conflicts.append("%s-%s" % (n,a)) -- break -- return conflicts -- --def local_nvra(module, path): -- """return nvra of a local rpm passed in""" -- -- cmd = ['/bin/rpm', '-qp' ,'--qf', -- '%{name}-%{version}-%{release}.%{arch}\n', path ] -- rc, out, err = module.run_command(cmd) -- if rc != 0: -- return None -- nvra = out.split('\n')[0] -- return nvra -- --def pkg_to_dict(pkgstr): -- -- if pkgstr.strip(): -- n,e,v,r,a,repo = pkgstr.split('|') -- else: -- return {'error_parsing': pkgstr} -- -- d = { -- 'name':n, -- 'arch':a, -- 'epoch':e, -- 'release':r, -- 'version':v, -- 'repo':repo, -- 'nevra': '%s:%s-%s-%s.%s' % (e,n,v,r,a) -- } -- -- if repo == 'installed': -- d['dnfstate'] = 'installed' -- else: -- d['dnfstate'] = 'available' -+def _mark_package_install(module, base, pkg_spec): -+ """Mark the package for install.""" -+ try: -+ base.install(pkg_spec) -+ except exceptions.MarkingError: -+ module.fail_json(msg="No package {} available.".format(pkg_spec)) - -- return d - --def repolist(module, repoq, qf="%{repoid}"): -+def ensure(module, base, state, names): -+ if not util.am_i_root(): -+ module.fail_json(msg="This command has to be run under the root user.") - -- cmd = repoq + ["--qf", qf, "-a"] -- rc,out,err = module.run_command(cmd) -- ret = [] -- if rc == 0: -- ret = set([ p for p in out.split('\n') if p.strip() ]) -- return ret -- --def list_stuff(module, conf_file, stuff): -- -- qf = "%{name}|%{epoch}|%{version}|%{release}|%{arch}|%{repoid}" -- repoq = [repoquery, '--show-duplicates', '--plugins', '--quiet', '-q'] -- if conf_file and os.path.exists(conf_file): -- repoq += ['-c', conf_file] -- -- if stuff == 'installed': -- return [ pkg_to_dict(p) for p in is_installed(module, repoq, '-a', conf_file, qf=qf) if p.strip() ] -- elif stuff == 'updates': -- return [ pkg_to_dict(p) for p in is_update(module, repoq, '-a', conf_file, qf=qf) if p.strip() ] -- elif stuff == 'available': -- return [ pkg_to_dict(p) for p in is_available(module, repoq, '-a', conf_file, qf=qf) if p.strip() ] -- elif stuff == 'repos': -- return [ dict(repoid=name, state='enabled') for name in repolist(module, repoq) if name.strip() ] -+ if names == ['*'] and state == 'latest': -+ base.upgrade_all() - else: -- return [ pkg_to_dict(p) for p in is_installed(module, repoq, stuff, conf_file, qf=qf) + is_available(module, repoq, stuff, conf_file, qf=qf) if p.strip() ] -- --def install(module, items, repoq, dnf_basecmd, conf_file, en_repos, dis_repos): -- -- res = {} -- res['results'] = [] -- res['msg'] = '' -- res['rc'] = 0 -- res['changed'] = False -- -- for spec in items: -- pkg = None -- -- # check if pkgspec is installed (if possible for idempotence) -- # localpkg -- if spec.endswith('.rpm') and '://' not in spec: -- # get the pkg name-v-r.arch -- if not os.path.exists(spec): -- res['msg'] += "No Package file matching '%s' found on system" % spec -- module.fail_json(**res) -- -- nvra = local_nvra(module, spec) -- # look for 
them in the rpmdb -- if is_installed(module, repoq, nvra, conf_file, en_repos=en_repos, dis_repos=dis_repos): -- # if they are there, skip it -- continue -- pkg = spec -- -- # URL -- elif '://' in spec: -- pkg = spec -- -- #groups :( -- elif spec.startswith('@'): -- # complete wild ass guess b/c it's a group -- pkg = spec -- -- # range requires or file-requires or pkgname :( -- else: -- # most common case is the pkg is already installed and done -- # short circuit all the bs - and search for it as a pkg in is_installed -- # if you find it then we're done -- if not set(['*','?']).intersection(set(spec)): -- pkgs = is_installed(module, repoq, spec, conf_file, en_repos=en_repos, dis_repos=dis_repos, is_pkg=True) -- if pkgs: -- res['results'].append('%s providing %s is already installed' % (pkgs[0], spec)) -- continue -- -- # look up what pkgs provide this -- pkglist = what_provides(module, repoq, spec, conf_file, en_repos=en_repos, dis_repos=dis_repos) -- if not pkglist: -- res['msg'] += "No Package matching '%s' found available, installed or updated" % spec -- module.fail_json(**res) -- -- # if any of the packages are involved in a transaction, fail now -- # so that we don't hang on the dnf operation later -- conflicts = transaction_exists(pkglist) -- if len(conflicts) > 0: -- res['msg'] += "The following packages have pending transactions: %s" % ", ".join(conflicts) -- module.fail_json(**res) -- -- # if any of them are installed -- # then nothing to do -- -- found = False -- for this in pkglist: -- if is_installed(module, repoq, this, conf_file, en_repos=en_repos, dis_repos=dis_repos, is_pkg=True): -- found = True -- res['results'].append('%s providing %s is already installed' % (this, spec)) -- break -- -- # if the version of the pkg you have installed is not in ANY repo, but there are -- # other versions in the repos (both higher and lower) then the previous checks won't work. -- # so we check one more time. This really only works for pkgname - not for file provides or virt provides -- # but virt provides should be all caught in what_provides on its own. -- # highly irritating -- if not found: -- if is_installed(module, repoq, spec, conf_file, en_repos=en_repos, dis_repos=dis_repos): -- found = True -- res['results'].append('package providing %s is already installed' % (spec)) -- -- if found: -- continue -- -- # if not - then pass in the spec as what to install -- # we could get here if nothing provides it but that's not -- # the error we're catching here -- pkg = spec -- -- cmd = dnf_basecmd + ['install', pkg] -- -- if module.check_mode: -- module.exit_json(changed=True) -- -- changed = True -- -- rc, out, err = module.run_command(cmd) -- -- # Fail on invalid urls: -- if (rc == 1 and '://' in spec and ('No package %s available.' % spec in out or 'Cannot open: %s. Skipping.' % spec in err)): -- err = 'Package at %s could not be installed' % spec -- module.fail_json(changed=False,msg=err,rc=1) -- elif (rc != 0 and 'Nothing to do' in err) or 'Nothing to do' in out: -- # avoid failing in the 'Nothing To Do' case -- # this may happen with an URL spec. -- # for an already installed group, -- # we get rc = 0 and 'Nothing to do' in out, not in err. 
-- rc = 0 -- err = '' -- out = '%s: Nothing to do' % spec -- changed = False -- -- res['rc'] += rc -- res['results'].append(out) -- res['msg'] += err -- -- # FIXME - if we did an install - go and check the rpmdb to see if it actually installed -- # look for the pkg in rpmdb -- # look for the pkg via obsoletes -- -- # accumulate any changes -- res['changed'] |= changed -- -- module.exit_json(**res) -- -- --def remove(module, items, repoq, dnf_basecmd, conf_file, en_repos, dis_repos): -- -- res = {} -- res['results'] = [] -- res['msg'] = '' -- res['changed'] = False -- res['rc'] = 0 -- -- for pkg in items: -- is_group = False -- # group remove - this is doom on a stick -- if pkg.startswith('@'): -- is_group = True -- else: -- if not is_installed(module, repoq, pkg, conf_file, en_repos=en_repos, dis_repos=dis_repos): -- res['results'].append('%s is not installed' % pkg) -- continue -- -- # run an actual dnf transaction -- cmd = dnf_basecmd + ["remove", pkg] -- -- if module.check_mode: -- module.exit_json(changed=True) -- -- rc, out, err = module.run_command(cmd) -- -- res['rc'] += rc -- res['results'].append(out) -- res['msg'] += err -- -- # compile the results into one batch. If anything is changed -- # then mark changed -- # at the end - if we've end up failed then fail out of the rest -- # of the process -- -- # at this point we should check to see if the pkg is no longer present -- -- if not is_group: # we can't sensibly check for a group being uninstalled reliably -- # look to see if the pkg shows up from is_installed. If it doesn't -- if not is_installed(module, repoq, pkg, conf_file, en_repos=en_repos, dis_repos=dis_repos): -- res['changed'] = True -- else: -- module.fail_json(**res) -- -- if rc != 0: -- module.fail_json(**res) -- -- module.exit_json(**res) -- --def latest(module, items, repoq, dnf_basecmd, conf_file, en_repos, dis_repos): -- -- res = {} -- res['results'] = [] -- res['msg'] = '' -- res['changed'] = False -- res['rc'] = 0 -- -- for spec in items: -- -- pkg = None -- basecmd = 'update' -- cmd = '' -- # groups, again -- if spec.startswith('@'): -- pkg = spec -- -- elif spec == '*': #update all -- # use check-update to see if there is any need -- rc,out,err = module.run_command(dnf_basecmd + ['check-update']) -- if rc == 100: -- cmd = dnf_basecmd + [basecmd] -- else: -- res['results'].append('All packages up to date') -- continue -- -- # dep/pkgname - find it -- else: -- if is_installed(module, repoq, spec, conf_file, en_repos=en_repos, dis_repos=dis_repos): -- basecmd = 'update' -+ pkg_specs, group_specs, filenames = cli.commands.parse_spec_group_file( -+ names) -+ if group_specs: -+ base.read_comps() -+ -+ groups = [] -+ for group_spec in group_specs: -+ group = base.comps.group_by_pattern(group_spec) -+ if group: -+ groups.append(group) - else: -- basecmd = 'install' -+ module.fail_json( -+ msg="No group {} available.".format(group_spec)) - -- pkglist = what_provides(module, repoq, spec, conf_file, en_repos=en_repos, dis_repos=dis_repos) -- if not pkglist: -- res['msg'] += "No Package matching '%s' found available, installed or updated" % spec -- module.fail_json(**res) -- -- nothing_to_do = True -- for this in pkglist: -- if basecmd == 'install' and is_available(module, repoq, this, conf_file, en_repos=en_repos, dis_repos=dis_repos): -- nothing_to_do = False -- break -- -- if basecmd == 'update' and is_update(module, repoq, this, conf_file, en_repos=en_repos, dis_repos=en_repos): -- nothing_to_do = False -- break -- -- if nothing_to_do: -- res['results'].append("All 
packages providing %s are up to date" % spec) -- continue -- -- # if any of the packages are involved in a transaction, fail now -- # so that we don't hang on the dnf operation later -- conflicts = transaction_exists(pkglist) -- if len(conflicts) > 0: -- res['msg'] += "The following packages have pending transactions: %s" % ", ".join(conflicts) -- module.fail_json(**res) -- -- pkg = spec -- if not cmd: -- cmd = dnf_basecmd + [basecmd, pkg] -- -- if module.check_mode: -- return module.exit_json(changed=True) -- -- rc, out, err = module.run_command(cmd) -- -- res['rc'] += rc -- res['results'].append(out) -- res['msg'] += err -- -- # FIXME if it is - update it and check to see if it applied -- # check to see if there is no longer an update available for the pkgspec -+ if state in ['installed', 'present']: -+ # Install files. -+ for filename in filenames: -+ base.package_install(base.add_remote_rpm(filename)) -+ # Install groups. -+ for group in groups: -+ base.group_install(group, const.GROUP_PACKAGE_TYPES) -+ # Install packages. -+ for pkg_spec in pkg_specs: -+ _mark_package_install(module, base, pkg_spec) -+ -+ elif state == 'latest': -+ # "latest" is same as "installed" for filenames. -+ for filename in filenames: -+ base.package_install(base.add_remote_rpm(filename)) -+ for group in groups: -+ try: -+ base.group_upgrade(group) -+ except exceptions.CompsError: -+ # If not already installed, try to install. -+ base.group_install(group, const.GROUP_PACKAGE_TYPES) -+ for pkg_spec in pkg_specs: -+ try: -+ base.upgrade(pkg_spec) -+ except dnf.exceptions.MarkingError: -+ # If not already installed, try to install. -+ _mark_package_install(module, base, pkg_spec) - -- if rc: -- res['failed'] = True - else: -- res['changed'] = True -- -- module.exit_json(**res) -- --def ensure(module, state, pkgspec, conf_file, enablerepo, disablerepo, -- disable_gpg_check): -- -- # take multiple args comma separated -- items = pkgspec.split(',') -- -- # need debug level 2 to get 'Nothing to do' for groupinstall. 
-- dnf_basecmd = [dnfbin, '-d', '2', '-y'] -+ if filenames: -+ module.fail_json( -+ msg="Cannot remove paths -- please specify package name.") -+ -+ installed = base.sack.query().installed() -+ for group in groups: -+ if installed.filter(name=group.name): -+ base.group_remove(group) -+ for pkg_spec in pkg_specs: -+ if installed.filter(name=pkg_spec): -+ base.remove(pkg_spec) - -- -- if not repoquery: -- repoq = None -+ if not base.resolve(): -+ module.exit_json(msg="Nothing to do") - else: -- repoq = [repoquery, '--show-duplicates', '--plugins', '--quiet', '-q'] -- -- if conf_file and os.path.exists(conf_file): -- dnf_basecmd += ['-c', conf_file] -- if repoq: -- repoq += ['-c', conf_file] -- -- dis_repos =[] -- en_repos = [] -- if disablerepo: -- dis_repos = disablerepo.split(',') -- if enablerepo: -- en_repos = enablerepo.split(',') -- -- for repoid in dis_repos: -- r_cmd = ['--disablerepo=%s' % repoid] -- dnf_basecmd.extend(r_cmd) -- -- for repoid in en_repos: -- r_cmd = ['--enablerepo=%s' % repoid] -- dnf_basecmd.extend(r_cmd) -- -- if state in ['installed', 'present', 'latest']: -- my = dnf_base(conf_file) -- try: -- for r in dis_repos: -- my.repos.disableRepo(r) -+ if module.check_mode: -+ module.exit_json(changed=True) -+ base.download_packages(base.transaction.install_set) -+ base.do_transaction() -+ response = {'changed': True, 'results': []} -+ for package in base.transaction.install_set: -+ response['results'].append("Installed: {}".format(package)) -+ for package in base.transaction.remove_set: -+ response['results'].append("Removed: {}".format(package)) - -- current_repos = dnf.yum.config.RepoConf() -- for r in en_repos: -- try: -- my.repos.enableRepo(r) -- new_repos = my.repos.repos.keys() -- for i in new_repos: -- if not i in current_repos: -- rid = my.repos.getRepo(i) -- a = rid.repoXML.repoid -- current_repos = new_repos -- except dnf.exceptions.Error, e: -- module.fail_json(msg="Error setting/accessing repo %s: %s" % (r, e)) -- except dnf.exceptions.Error, e: -- module.fail_json(msg="Error accessing repos: %s" % e) -- -- if state in ['installed', 'present']: -- if disable_gpg_check: -- dnf_basecmd.append('--nogpgcheck') -- install(module, items, repoq, dnf_basecmd, conf_file, en_repos, dis_repos) -- elif state in ['removed', 'absent']: -- remove(module, items, repoq, dnf_basecmd, conf_file, en_repos, dis_repos) -- elif state == 'latest': -- if disable_gpg_check: -- dnf_basecmd.append('--nogpgcheck') -- latest(module, items, repoq, dnf_basecmd, conf_file, en_repos, dis_repos) -+ module.exit_json(**response) - -- # should be caught by AnsibleModule argument_spec -- return dict(changed=False, failed=True, results='', errors='unexpected state') - - def main(): -- -- # state=installed name=pkgspec -- # state=removed name=pkgspec -- # state=latest name=pkgspec -- # -- # informational commands: -- # list=installed -- # list=updates -- # list=available -- # list=repos -- # list=pkgspec -- -+ """The main function.""" - module = AnsibleModule( -- argument_spec = dict( -- name=dict(aliases=['pkg']), -- # removed==absent, installed==present, these are accepted as aliases -- state=dict(default='installed', choices=['absent','present','installed','removed','latest']), -- enablerepo=dict(), -- disablerepo=dict(), -+ argument_spec=dict( -+ name=dict(aliases=['pkg'], type='list'), -+ state=dict( -+ default='installed', -+ choices=[ -+ 'absent', 'present', 'installed', 'removed', 'latest']), -+ enablerepo=dict(type='list', default=[]), -+ disablerepo=dict(type='list', default=[]), - 
list=dict(), - conf_file=dict(default=None), -- disable_gpg_check=dict(required=False, default="no", type='bool'), -- # this should not be needed, but exists as a failsafe -- install_repoquery=dict(required=False, default="yes", type='bool'), -+ disable_gpg_check=dict(default=False, type='bool'), - ), -- required_one_of = [['name','list']], -- mutually_exclusive = [['name','list']], -- supports_check_mode = True -- ) -- -- # this should not be needed, but exists as a failsafe -+ required_one_of=[['name', 'list']], -+ mutually_exclusive=[['name', 'list']], -+ supports_check_mode=True) - params = module.params -- if params['install_repoquery'] and not repoquery and not module.check_mode: -- install_dnf_utils(module) -- -- if not repoquery: -- module.fail_json(msg="repoquery is required to use this module at this time. Please install the yum-utils package.") -+ base = _base( -+ module, params['conf_file'], params['disable_gpg_check'], -+ params['disablerepo'], params['enablerepo']) - if params['list']: -- results = dict(results=list_stuff(module, params['conf_file'], params['list'])) -- module.exit_json(**results) -- -+ list_items(module, base, params['list']) - else: -- pkg = params['name'] -- state = params['state'] -- enablerepo = params.get('enablerepo', '') -- disablerepo = params.get('disablerepo', '') -- disable_gpg_check = params['disable_gpg_check'] -- res = ensure(module, state, pkg, params['conf_file'], enablerepo, -- disablerepo, disable_gpg_check) -- module.fail_json(msg="we should never get here unless this all failed", **res) -+ ensure(module, base, params['state'], params['name']) -+ - - # import module snippets - from ansible.module_utils.basic import * --main() -- -+if __name__ == '__main__': -+ main() diff --git a/ansible.spec b/ansible.spec index 539bed1..767e9ac 100644 --- a/ansible.spec +++ b/ansible.spec @@ -9,25 +9,20 @@ BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot Name: ansible Summary: SSH-based configuration management, deployment, and task execution system -Version: 1.9.4 -Release: 2%{?dist} +Version: 2.0.0.1 +Release: 1%{?dist} Group: Development/Libraries License: GPLv3+ Source0: http://releases.ansible.com/ansible/%{name}-%{version}.tar.gz +# Use get-unittests.sh tags/v2.0.0-0.6.rc1 to retrieve the unittests +# (Replace the tags/ parameter with the tag or hash that you want to sync with) +Source1: ansible-unittests.tar.xz +Source100: get-unittests.sh +# I think this was left out of the tarballs by mistake Url: http://ansible.com -# -# Patch to detect dnf as package manager. -# already upstream with https://github.com/opoplawski/ansible/commit/f624ec4cb8771736ffbe3fe81b2949edda159863 -# https://bugzilla.redhat.com/show_bug.cgi?id=1258080 -Patch0: ansible-1.9.3-dnf.patch -# Backport of the master branch dnf module. 
-Patch2: ansible-dnf-backport.patch -# Backport fix for #2043, crash when pulling Docker images: -# https://github.com/ansible/ansible-modules-core/commit/64b8596250e58a55eee8f2d4323d35ca32a8cd53 -Patch3: 0001-fix-2043-strip-empty-dict-from-end-of-pull-stream.patch - BuildArch: noarch + %if 0%{?rhel} && 0%{?rhel} <= 5 BuildRequires: python26-devel @@ -40,12 +35,33 @@ Requires: python26-httplib2 BuildRequires: python2-devel BuildRequires: python-setuptools +# For tests +BuildRequires: PyYAML +BuildRequires: python-paramiko +BuildRequires: python-jinja2 +BuildRequires: python-keyczar +BuildRequires: python-httplib2 +BuildRequires: python-setuptools +BuildRequires: python-six +BuildRequires: python-nose +BuildRequires: python-coverage +BuildRequires: python-mock + +%if (0%{?rhel} && 0%{?rhel} <= 6) +# Distros with python < 2.7.0 +BuildRequires: python-unittest2 +# Warning: Following is not suitable for EPEL proper. See the %%check section +# for a full explanation +BuildRequires: python-pip +%endif + Requires: PyYAML Requires: python-paramiko Requires: python-jinja2 Requires: python-keyczar Requires: python-httplib2 Requires: python-setuptools +Requires: python-six Requires: sshpass %endif @@ -59,6 +75,10 @@ Requires: python-crypto2.6 # The python-2.6 stdlib json module has a bug that affects the ansible # to_nice_json filter Requires: python-simplejson + +# For testing +BuildRequires: python-crypto2.6 +BuildRequires: python-simplejson %endif # @@ -80,9 +100,8 @@ are transferred to managed machines automatically. %prep %setup -q -%patch0 -p1 -%patch2 -p1 -%patch3 -p1 +# Unittests +tar -xJvf %{SOURCE1} %build %{__python} setup.py build @@ -98,6 +117,10 @@ cp -v docs/man/man1/*.1 $RPM_BUILD_ROOT/%{_mandir}/man1/ mkdir -p $RPM_BUILD_ROOT/%{_datadir}/ansible mkdir -p $RPM_BUILD_ROOT/%{_datadir}/ansible_plugins/{action,callback,connection,lookup,vars,filter}_plugins +# RHEL <= 6 doesn't have a new enough python-mock to run the tests +%if 0%{?fedora} || 0%{?rhel} >= 7 +make tests +%endif %clean rm -rf $RPM_BUILD_ROOT @@ -106,13 +129,16 @@ rm -rf $RPM_BUILD_ROOT %defattr(-,root,root) %{python_sitelib}/ansible* %{_bindir}/ansible* -%{_datadir}/ansible -%{_datadir}/ansible_plugins -%config(noreplace) %{_sysconfdir}/ansible -%doc README.md PKG-INFO COPYING +%config(noreplace) %{_sysconfdir}/ansible/ +%doc README.md PKG-INFO COPYING CHANGELOG.md %doc %{_mandir}/man1/ansible* %changelog +* Tue Jan 12 2016 Toshio Kuratomi - 2.0.0.1-1 +- Ansible 2.0.0.1 from upstream. Rewrite with many bugfixes, rewritten code, + and new features. See the upstream changelog for details: + https://github.com/ansible/ansible/blob/devel/CHANGELOG.md + * Wed Oct 14 2015 Adam Williamson - 1.9.4-2 - backport upstream fix for GH #2043 (crash when pulling Docker images) diff --git a/get-unittests.sh b/get-unittests.sh new file mode 100755 index 0000000..891021d --- /dev/null +++ b/get-unittests.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +if test -d ansible-temp ; then + pushd ansible-temp + git checkout devel + git pull --rebase + popd +else + git clone https://github.com/ansible/ansible.git ansible-temp +fi + +pushd ansible-temp +if test -n "$1" ; then + git checkout "$1" +fi +popd +tar -cJvf ansible-unittests.tar.xz -C ansible-temp/ test/units diff --git a/sources b/sources index 6633c2d..6837829 100644 --- a/sources +++ b/sources @@ -1 +1,2 @@ -ad411285d4a78e78e3a49f4ee2c7cc3f ansible-1.9.4.tar.gz +b4ea2bc7323668c42e15e47cba7ee5be ansible-2.0.0.1.tar.gz +29789b6ac5a12664fdca6abf7b29b0d9 ansible-unittests.tar.xz
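
Note on get-unittests.sh (added above): the script is declared with #!/bin/sh but relies on pushd/popd, which are bash builtins rather than POSIX sh commands. That works on Fedora, where /bin/sh is bash, but is not strictly portable. Below is a minimal POSIX-sh sketch of the same steps, assuming the same repository URL, the same optional tag/hash argument, and the same output tarball name as the script above; the subshell cd pattern and set -e are the only substitutions.

  #!/bin/sh
  # Refresh (or clone) an ansible checkout, optionally pin it to "$1",
  # and pack test/units into ansible-unittests.tar.xz.
  set -e
  if test -d ansible-temp ; then
      ( cd ansible-temp && git checkout devel && git pull --rebase )
  else
      git clone https://github.com/ansible/ansible.git ansible-temp
  fi
  if test -n "$1" ; then
      ( cd ansible-temp && git checkout "$1" )
  fi
  tar -cJvf ansible-unittests.tar.xz -C ansible-temp test/units

It would be invoked the same way the spec comment documents, e.g. ./get-unittests.sh tags/v2.0.0-0.6.rc1.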