[Piuparts-commits] [SCM] piuparts git repository branch, master, updated. eda668423fa87898c59d1075118693714aa5a053
Holger Levsen
holger at layer-acht.org
Fri Dec 23 10:26:57 UTC 2011
The following commit has been merged in the master branch:
commit 29a296fb63464ccc57b15b9e06ee0230c826c364
Merge: 7fd706f372c85f99a8feeff519189cc6369ad509 9c9001df8eb4ee0c3453a62f0474a1a6ac0a74cc
Author: Holger Levsen <holger at layer-acht.org>
Date: Sun Nov 27 12:16:33 2011 +0100
merge eatmydata again
diff --combined piuparts.py
index ea89dcd,3a335ca..6df939e
--- a/piuparts.py
+++ b/piuparts.py
@@@ -49,7 -49,6 +49,7 @@@ import subproces
import unittest
import urllib
import uuid
+from signal import signal, SIGTERM, SIGKILL
try:
from debian import deb822
@@@ -59,13 -58,13 +59,13 @@@ except ImportError
class Defaults:
"""Default settings which depend on flavor of Debian.
-
+
Some settings, such as the default mirror and distribution, depend on
which flavor of Debian we run under: Debian itself, or a derived
distribution such as Ubuntu. This class abstracts away the defaults
so that the rest of the code can just refer to the values defined
herein.
-
+
"""
def get_components(self):
@@@ -73,10 -72,10 +73,10 @@@
def get_mirror(self):
"""Return default mirror."""
-
+
def get_distribution(self):
"""Return default distribution."""
-
+
class DebianDefaults(Defaults):
@@@ -105,13 -104,13 +105,13 @@@ class UbuntuDefaults(Defaults)
class DefaultsFactory:
"""Instantiate the right defaults class."""
-
+
def guess_flavor(self):
p = subprocess.Popen(["lsb_release", "-i", "-s"],
stdout=subprocess.PIPE)
stdout, stderr = p.communicate()
return stdout.strip().lower()
-
+
def new_defaults(self):
if not settings.defaults:
settings.defaults = self.guess_flavor()
@@@ -127,7 -126,7 +127,7 @@@
class Settings:
"""Global settings for this program."""
-
+
def __init__(self):
self.defaults = None
self.tmpdir = None
@@@ -154,7 -153,6 +154,7 @@@
self.no_upgrade_test = False
self.skip_cronfiles_test = False
self.skip_logrotatefiles_test = False
+ self.check_broken_diversions = True
self.check_broken_symlinks = True
self.warn_broken_symlinks = True
self.debfoster_options = None
@@@ -255,7 -253,6 +255,7 @@@
"/usr/lib/python2\../site-packages/debconf.py[co]",
"/var/backups/.*",
"/var/cache/man/(/.*)?",
+ "/var/lib/apt/lists/.*",
"/var/lib/cvs(/.*)?",
"/var/lib/dpkg/alternatives",
"/var/lib/dpkg/triggers/.*",
@@@ -327,7 -324,7 +327,7 @@@ def setup_logging(log_level, log_file_n
handler.setFormatter(formatter)
logger.addHandler(handler)
HANDLERS.append(handler)
-
+
if log_file_name:
handler = logging.FileHandler(log_file_name)
handler.setFormatter(formatter)
@@@ -383,30 -380,9 +383,30 @@@ def run(command, ignore_errors=False)
env["LC_ALL"] = "C"
env["LANGUAGES"] = ""
env["PIUPARTS_OBJECTS"] = ' '.join(str(vobject) for vobject in settings.testobjects )
- p = subprocess.Popen(command, env=env, stdin=subprocess.PIPE,
+ devnull = open('/dev/null', 'r')
+ p = subprocess.Popen(command, env=env, stdin=devnull,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- (output, _) = p.communicate()
+ output = ""
+ excessive_output = False
+ while p.poll() is None:
+ """Read 64 KB chunks, but depending on the output buffering behavior
+ of the command we may get less even if more output is coming later.
+ Abort after reading 2 MB."""
+ output += p.stdout.read(1 << 16)
+ if (len(output) > (1 << 21)):
+ excessive_output = True
+ logging.error("Terminating command due to excessive output")
+ p.terminate()
+ for i in range(10):
+ time.sleep(0.5)
+ if p.poll() is not None:
+ break
+ else:
+ logging.error("Killing command due to excessive output")
+ p.kill()
+ p.wait()
+ break
+ devnull.close()
if output:
dump("\n" + indent_string(output.rstrip("\n")))
@@@ -419,8 -395,6 +419,8 @@@
else:
logging.error("Command failed (status=%d): %s\n%s" %
(p.returncode, repr(command), indent_string(output)))
+ if excessive_output:
+ logging.error("Command was terminated while producing excessive output")
panic()
return p.returncode, output
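
For reference, a minimal standalone sketch of the output-capping loop introduced above: read the child's stdout in 64 KB chunks, terminate it once 2 MB have accumulated, and fall back to SIGKILL if it ignores the termination request. The helper name run_capped and the final drain of leftover output are illustrative additions, not part of piuparts.py (Python 2, like the rest of the file):

import subprocess
import time

def run_capped(command, limit=1 << 21, chunk=1 << 16):
    """Run 'command', capping the captured output at 'limit' bytes (sketch)."""
    devnull = open('/dev/null', 'r')
    p = subprocess.Popen(command, stdin=devnull,
                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    output = ""
    capped = False
    while p.poll() is None:
        # May return fewer than 'chunk' bytes, depending on how the
        # command buffers its output.
        output += p.stdout.read(chunk)
        if len(output) > limit:
            capped = True
            p.terminate()                 # polite first...
            for i in range(10):
                time.sleep(0.5)
                if p.poll() is not None:
                    break
            else:
                p.kill()                  # ...forceful if SIGTERM is ignored
                p.wait()
            break
    output += p.stdout.read()             # drain anything left after the last poll
    devnull.close()
    return p.returncode, output, capped
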
@@@ -467,7 -441,7 +467,7 @@@ def remove_files(filenames)
def make_metapackage(name, depends, conflicts):
"""Return the path to a .deb created just for satisfying dependencies
-
+
Caller is responsible for removing the temporary directory containing the
.deb when finished.
"""
@@@ -499,78 -473,32 +499,78 @@@
return os.path.join(tmpdir, name) + '.deb'
-def is_broken_symlink(root, dirpath, filename):
- """Is symlink dirpath+filename broken?
-
- When resolving the symlink, pretend (similar to chroot) that root is
- the root of the filesystem. Note that this does NOT work completely
- correctly if the symlink target contains .. path components. This is
- good enough for my immediate purposes, but nowhere near good enough
- for anything that needs to be secure. For that, use chroot and have
- the kernel resolve symlinks instead.
+def split_path(pathname):
+ parts = []
+ while pathname:
+ (head, tail) = os.path.split(pathname)
+ #print "split '%s' => '%s' + '%s'" % (pathname, head, tail)
+ if tail:
+ parts.append(tail)
+ elif not head:
+ break
+ elif head == pathname:
+ parts.append(head)
+ break
+ pathname = head
+ return parts
+
+def canonicalize_path(root, pathname):
+ """Canonicalize a path name, simulating chroot at 'root'.
+
+ When resolving the symlink, pretend (similar to chroot) that
+ 'root' is the root of the filesystem. Also resolve '..' and
+ '.' components. This should not escape the chroot below
+ 'root', but for security concerns, use chroot and have the
+ kernel resolve symlinks instead.
"""
-
- pathname = os.path.join(dirpath, filename)
- i = 0
- while os.path.islink(pathname):
- if i >= 10: # let's avoid infinite loops...
- return True
- i += 1
- target = os.readlink(pathname)
- if os.path.isabs(target):
- pathname = os.path.join(root, target[1:]) # Assume Unix filenames
+ #print "\nCANONICALIZE %s %s" % (root, pathname)
+ seen = []
+ parts = split_path(pathname)
+ #print "PARTS ", list(reversed(parts))
+ path = "/"
+ while parts:
+ tag = "\n".join(parts + [path])
+ #print "TEST '%s' + " % path, list(reversed(parts))
+ if tag in seen or len(seen) > 1024:
+ fullpath = os.path.join(path, *reversed(parts))
+ #print "LOOP %s" % fullpath
+ path = fullpath
+ logging.error("ELOOP: Too many symbolic links in '%s'" % path)
+ break
+ seen.append(tag)
+ part = parts.pop()
+ # Using normpath() to cleanup '.', '..' and multiple slashes.
+ # Removing a suffix 'foo/..' is safe here since it can't change the
+ # meaning of 'path' because it contains no symlinks - they have been
+ # resolved already.
+ newpath = os.path.normpath(os.path.join(path, part))
+ rootedpath = os.path.join(root, newpath[1:])
+ if newpath == "/":
+ path = "/"
+ elif os.path.islink(rootedpath):
+ target = os.readlink(rootedpath)
+ #print "LINK to '%s'" % target
+ if os.path.isabs(target):
+ path = "/"
+ parts.extend(split_path(target))
else:
- pathname = os.path.join(os.path.dirname(pathname), target)
+ path = newpath
+ #print "FINAL '%s'" % path
+ return path
+
+
+def is_broken_symlink(root, dirpath, filename):
+ """Is symlink dirpath+filename broken?"""
+
+ if dirpath[:len(root)] == root:
+ dirpath = dirpath[len(root):]
+ pathname = canonicalize_path(root, os.path.join(dirpath, filename))
+ pathname = os.path.join(root, pathname[1:])
# The symlink chain, if any, has now been resolved. Does the target
# exist?
+ #print "EXISTS ", pathname, os.path.exists(pathname)
return not os.path.exists(pathname)
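
A small usage sketch of the new path helpers, assuming piuparts.py is importable as a module; the directory layout below is made up for illustration:

import os
import shutil
import tempfile

from piuparts import canonicalize_path, is_broken_symlink

root = tempfile.mkdtemp()
try:
    os.makedirs(os.path.join(root, "usr/share"))
    # /usr/doc -> /usr/share/doc, resolved as if 'root' were the filesystem root
    os.symlink("/usr/share/doc", os.path.join(root, "usr/doc"))
    assert canonicalize_path(root, "/usr/doc") == "/usr/share/doc"
    assert is_broken_symlink(root, os.path.join(root, "usr"), "doc")       # target missing
    os.mkdir(os.path.join(root, "usr/share/doc"))
    assert not is_broken_symlink(root, os.path.join(root, "usr"), "doc")   # now resolvable
finally:
    shutil.rmtree(root)
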
@@@ -594,25 -522,10 +594,25 @@@ class IsBrokenSymlinkTests(unittest.Tes
self.symlink("absolute-broken", "absolute-broken-to-symlink")
self.symlink("/", "absolute-works")
self.symlink("/absolute-works", "absolute-works-to-symlink")
-
+ os.mkdir(os.path.join(self.testdir, "dir"))
+ self.symlink("dir", "dir-link")
+ os.mkdir(os.path.join(self.testdir, "dir/subdir"))
+ self.symlink("subdir", "dir/subdir-link")
+ self.symlink("notexist/", "trailing-slash-broken")
+ self.symlink("dir/", "trailing-slash-works")
+ self.symlink("selfloop", "selfloop")
+ self.symlink("/absolute-selfloop", "absolute-selfloop")
+ self.symlink("../dir/selfloop", "dir/selfloop")
+ self.symlink("../dir-link/selfloop", "dir/selfloop1")
+ self.symlink("../../dir/subdir/selfloop", "dir/subdir/selfloop")
+ self.symlink("../../dir-link/subdir/selfloop", "dir/subdir/selfloop1")
+ self.symlink("../../link/subdir-link/selfloop", "dir/subdir/selfloop2")
+ self.symlink("../../dir-link/subdir-link/selfloop", "dir/subdir/selfloop3")
+ self.symlink("explode/bomb", "explode")
+
def tearDown(self):
shutil.rmtree(self.testdir)
-
+
def testRelativeBroken(self):
self.failUnless(is_broken_symlink(self.testdir, self.testdir,
"relative-broken"))
@@@ -628,37 -541,7 +628,37 @@@
def testAbsoluteBrokenToSymlink(self):
self.failUnless(is_broken_symlink(self.testdir, self.testdir,
"absolute-broken-to-symlink"))
-
+
+ def testTrailingSlashBroken(self):
+ self.failUnless(is_broken_symlink(self.testdir, self.testdir,
+ "trailing-slash-broken"))
+
+ def testSelfLoopBroken(self):
+ self.failUnless(is_broken_symlink(self.testdir, self.testdir,
+ "selfloop"))
+
+ def testExpandingSelfLoopBroken(self):
+ self.failUnless(is_broken_symlink(self.testdir, self.testdir,
+ "explode"))
+
+ def testAbsoluteSelfLoopBroken(self):
+ self.failUnless(is_broken_symlink(self.testdir, self.testdir,
+ "absolute-selfloop"))
+
+ def testSubdirSelfLoopBroken(self):
+ self.failUnless(is_broken_symlink(self.testdir, self.testdir,
+ "dir/selfloop"))
+ self.failUnless(is_broken_symlink(self.testdir, self.testdir,
+ "dir/selfloop1"))
+ self.failUnless(is_broken_symlink(self.testdir, self.testdir,
+ "dir/subdir/selfloop"))
+ self.failUnless(is_broken_symlink(self.testdir, self.testdir,
+ "dir/subdir/selfloop1"))
+ self.failUnless(is_broken_symlink(self.testdir, self.testdir,
+ "dir/subdir/selfloop2"))
+ self.failUnless(is_broken_symlink(self.testdir, self.testdir,
+ "dir/subdir/selfloop3"))
+
def testRelativeWorks(self):
self.failIf(is_broken_symlink(self.testdir, self.testdir,
"relative-works"))
@@@ -675,10 -558,6 +675,10 @@@
self.failIf(is_broken_symlink(self.testdir, self.testdir,
"absolute-works-to-symlink"))
+ def testTrailingSlashWorks(self):
+ self.failIf(is_broken_symlink(self.testdir, self.testdir,
+ "trailing-slash-works"))
+
def testMultiLevelNestedSymlinks(self):
# target/first-link -> ../target/second-link -> ../target
@@@ -688,27 -567,13 +688,27 @@@
self.failIf(is_broken_symlink(self.testdir, self.testdir,
"target/first-link"))
+ def testMultiLevelNestedAbsoluteSymlinks(self):
+ # first-link -> /second-link/final-target
+ # second-link -> /target-dir
+
+ os.mkdir(os.path.join(self.testdir, "final-dir"))
+ os.mkdir(os.path.join(self.testdir, "final-dir/final-target"))
+ self.symlink("/second-link/final-target", "first-link")
+ self.symlink("/final-dir", "second-link")
+ self.failIf(is_broken_symlink(self.testdir, self.testdir,
+ "first-link"))
+
class Chroot:
"""A chroot for testing things in."""
-
+
def __init__(self):
self.name = None
+
+ self.pre_install_diversions = None
+ self.post_install_diversions = None
def create_temp_dir(self):
"""Create a temporary directory for the chroot."""
@@@ -734,6 -599,7 +734,6 @@@
if settings.basetgz:
self.run(["apt-get", "-yf", "upgrade"])
self.minimize()
- self.run(["apt-get", "clean"])
#copy scripts dir into the chroot
if settings.scriptsdir is not None:
@@@ -745,10 -611,9 +745,10 @@@
if (sfile.startswith("post_") or sfile.startswith("pre_")) and os.path.isfile(os.path.join((settings.scriptsdir), sfile)):
shutil.copy(os.path.join((settings.scriptsdir), sfile), dest)
+ self.pre_install_diversions = self.get_diversions()
+
# Run custom scripts after creating the chroot.
- if settings.scriptsdir is not None:
- self.run_scripts("post_setup")
+ self.run_scripts("post_setup")
if settings.savetgz:
self.pack_into_tgz(settings.savetgz)
@@@ -767,7 -632,7 +767,7 @@@
shutil.rmtree(self.name)
logging.debug("Removed directory tree at %s" % self.name)
elif settings.keep_tmpdir:
- logging.debug("Keeping directory tree at %s" % self.name)
+ logging.debug("Keeping directory tree at %s" % self.name)
def create_temp_tgz_file(self):
"""Return the path to a file to be used as a temporary tgz file"""
@@@ -778,7 -643,6 +778,7 @@@
def pack_into_tgz(self, result):
"""Tar and compress all files in the chroot."""
+ self.run(["apt-get", "clean"])
logging.debug("Saving %s to %s." % (self.name, result))
run(['tar', '--exclude', './proc/*', '-czf', result, '-C', self.name, './'])
@@@ -817,11 -681,11 +817,11 @@@
for mirror, components in settings.debian_mirrors:
lines.append("deb %s %s %s\n" %
(mirror, distro, " ".join(components)))
- create_file(os.path.join(self.name, "etc/apt/sources.list"),
+ create_file(self.relative("etc/apt/sources.list"),
"".join(lines))
def create_apt_conf(self):
- """Create /etc/apt/apt.conf inside the chroot."""
+ """Create /etc/apt/apt.conf.d/piuparts inside the chroot."""
lines = [
'APT::Get::Assume-Yes "yes";\n',
'APT::Install-Recommends "0";\n',
@@@ -848,7 -712,7 +848,7 @@@
if settings.dpkg_force_confdef:
lines.append('Dpkg::Options {"--force-confdef";};\n')
- create_file(self.relative("etc/apt/apt.conf"),
+ create_file(self.relative("etc/apt/apt.conf.d/piuparts"),
"".join(lines))
def create_dpkg_conf(self):
@@@ -860,17 -724,15 +860,17 @@@
lines.append('force-confdef\n')
logging.info("Warning: dpkg has been configured to use the force-confdef option. This will hide problems, see #466118.")
if lines:
+ if not os.path.exists(self.relative("etc/dpkg/dpkg.cfg.d")):
+ os.mkdir(self.relative("etc/dpkg/dpkg.cfg.d"))
create_file(self.relative("etc/dpkg/dpkg.cfg.d/piuparts"),
"".join(lines))
def create_policy_rc_d(self):
"""Create a policy-rc.d that prevents daemons from running."""
- full_name = os.path.join(self.name, "usr/sbin/policy-rc.d")
+ full_name = self.relative("usr/sbin/policy-rc.d")
create_file(full_name, "#!/bin/sh\nexit 101\n")
- os.chmod(full_name, 0777)
- logging.debug("Created policy-rc.d and chmodded it.")
+ os.chmod(full_name, 0777)
+ logging.debug("Created policy-rc.d and chmodded it.")
def setup_minimal_chroot(self):
"""Set up a minimal Debian system in a chroot."""
@@@ -914,8 -776,9 +914,8 @@@
for distro in distros:
logging.debug("Upgrading %s to %s" % (self.name, distro))
self.create_apt_sources(distro)
- # Run custom scripts before upgrade
- if settings.scriptsdir is not None:
- self.run_scripts("pre_distupgrade")
+ # Run custom scripts before upgrade
+ self.run_scripts("pre_distupgrade")
self.run(["apt-get", "update"])
self.run(["apt-get", "-yf", "dist-upgrade"])
# Sometimes dist-upgrade won't upgrade the packages we want
@@@ -925,13 -788,14 +925,13 @@@
# packages. So, we force the installation like this.
self.install_packages_by_name(packages)
# Run custom scripts after upgrade
- if settings.scriptsdir is not None:
- self.run_scripts("post_distupgrade")
+ self.run_scripts("post_distupgrade")
self.check_for_no_processes()
-
- def apt_get_knows(self, package_names):
+
+ def apt_get_knows(self, packages):
"""Does apt-get (or apt-cache) know about a set of packages?"""
- for name in package_names:
+ for name in packages:
(status, output) = self.run(["apt-cache", "show", name],
ignore_errors=True)
if status != 0:
@@@ -941,7 -805,7 +941,7 @@@
def copy_files(self, source_names, target_name):
"""Copy files in 'source_name' to file/dir 'target_name', relative
to the root of the chroot."""
- target_name = os.path.join(self.name, target_name)
+ target_name = self.relative(target_name)
logging.debug("Copying %s to %s" %
(", ".join(source_names), target_name))
for source_name in source_names:
@@@ -951,7 -815,7 +951,7 @@@
logging.error("Error copying %s to %s: %s" %
(source_name, target_name, detail))
panic()
-
+
def list_installed_files (self, pre_info, post_info):
"""List the new files installed, removed and modified between two dir trees.
Actually, it is a nice output of the function diff_meta_data."""
@@@ -961,7 -825,7 +961,7 @@@
if new:
logging.debug("New installed files on system:\n" + file_list(new, file_owners))
else:
- logging.debug("The package did not install any new file.\n")
+ logging.debug("The package did not install any new file.\n")
if removed:
logging.debug("The following files have disappeared:\n" +
@@@ -971,16 -835,17 +971,16 @@@
logging.debug("The following files have been modified:\n" +
file_list(modified, file_owners))
else:
- logging.debug("The package did not modify any file.\n")
+ logging.debug("The package did not modify any file.\n")
- def install_package_files(self, filenames):
- if filenames:
- self.copy_files(filenames, "tmp")
- tmp_files = [os.path.basename(a) for a in filenames]
+ def install_package_files(self, package_files):
+ if package_files:
+ self.copy_files(package_files, "tmp")
+ tmp_files = [os.path.basename(a) for a in package_files]
tmp_files = [os.path.join("tmp", name) for name in tmp_files]
- if settings.scriptsdir is not None:
- self.run_scripts("pre_install")
+ self.run_scripts("pre_install")
if settings.list_installed_files:
pre_info = self.save_meta_data()
@@@ -997,9 -862,12 +997,9 @@@
logging.info ("Installation of %s ok", tmp_files)
- if settings.scriptsdir is not None:
- self.run_scripts("post_install")
+ self.run_scripts("post_install")
- self.run(["apt-get", "clean"])
- remove_files([os.path.join(self.name, name)
- for name in tmp_files])
+ remove_files([self.relative(name) for name in tmp_files])
def get_selections(self):
"""Get current package selections in a chroot."""
@@@ -1010,44 -878,17 +1010,44 @@@
vdict[name] = status
return vdict
+ def get_diversions(self):
+ """Get current dpkg-divert --list in a chroot."""
+ if not settings.check_broken_diversions:
+ return
+ (status, output) = self.run(["dpkg-divert", "--list"])
+ lines = []
+ for line in output.split("\n"):
+ lines.append(line)
+ return lines
+
+
+ def check_for_broken_diversions(self):
+ """Check that diversions in chroot are identical (though potentially reordered)."""
+ if not settings.check_broken_diversions:
+ return
+ if self.pre_install_diversions and self.post_install_diversions:
+ added = [ln for ln in self.post_install_diversions if not ln in self.pre_install_diversions]
+ removed = [ln for ln in self.pre_install_diversions if not ln in self.post_install_diversions]
+ if added:
+ logging.error("Error: Installed diversions (dpkg-divert) not removed by purge:\n%s" %
+ indent_string("\n".join(added)))
+ if removed:
+ logging.error("Error: Existing diversions (dpkg-divert) removed/modified:\n%s" %
+ indent_string("\n".join(removed)))
+
+
def remove_or_purge(self, operation, packages):
"""Remove or purge packages in a chroot."""
for name in packages:
self.run(["dpkg", "--" + operation, name], ignore_errors=True)
self.run(["dpkg", "--remove", "--pending"], ignore_errors=True)
-
- def restore_selections(self, changes, packages):
- """Restore package selections in a chroot by applying 'changes'.
- 'changes' is a return value from diff_selections."""
-
+
+ def restore_selections(self, selections, packages):
+ """Restore package selections in a chroot to the state in
+ 'selections'."""
+
+ changes = diff_selections(self, selections)
deps = {}
nondeps = {}
for name, state in changes.iteritems():
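
The diversion bookkeeping added above records the output of `dpkg-divert --list` before installation and again after purge, then reports anything that appeared or disappeared, ignoring ordering. A self-contained sketch of that comparison (compare_diversions and the sample lines are made up here):

def compare_diversions(pre, post):
    # 'pre' and 'post' are the text lines of "dpkg-divert --list" taken before
    # package installation and after purge; ordering is irrelevant.
    added = [ln for ln in post if ln not in pre]     # left behind by the package
    removed = [ln for ln in pre if ln not in post]   # pre-existing diversions lost
    return added, removed

pre = ["diversion of /usr/bin/foo to /usr/bin/foo.real by foo"]
post = ["diversion of /bin/true to /bin/true.distrib by bar"] + pre
# compare_diversions(pre, post) -> (['diversion of /bin/true ...'], [])
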
@@@ -1055,7 -896,7 +1055,7 @@@
nondeps[name] = state
else:
deps[name] = state
-
+
deps_to_remove = [name for name, state in deps.iteritems()
if state == "remove"]
deps_to_purge = [name for name, state in deps.iteritems()
@@@ -1064,29 -905,29 +1064,29 @@@
if state == "remove"]
nondeps_to_purge = [name for name, state in nondeps.iteritems()
if state == "purge"]
-
+
# Run custom scripts before removing all packages.
- if settings.scriptsdir is not None:
- self.run_scripts("pre_remove")
+ self.run_scripts("pre_remove")
# First remove all packages.
self.remove_or_purge("remove", deps_to_remove + deps_to_purge +
nondeps_to_remove + nondeps_to_purge)
# Run custom scripts after removing all packages.
- if settings.scriptsdir is not None:
- self.run_scripts("post_remove")
+ self.run_scripts("post_remove")
if not settings.skip_cronfiles_test:
cronfiles, cronfiles_list = self.check_if_cronfiles(packages)
-
+
if not settings.skip_cronfiles_test and cronfiles:
self.check_output_cronfiles(cronfiles_list)
if not settings.skip_logrotatefiles_test:
logrotatefiles, logrotatefiles_list = self.check_if_logrotatefiles(packages)
-
+
if not settings.skip_logrotatefiles_test and logrotatefiles:
+ installed = self.install_logrotate()
self.check_output_logrotatefiles(logrotatefiles_list)
+ self.remove_or_purge("purge", installed)
# Then purge all packages being depended on.
self.remove_or_purge("purge", deps_to_purge)
@@@ -1094,8 -935,6 +1094,8 @@@
# Finally, purge actual packages.
self.remove_or_purge("purge", nondeps_to_purge)
+ self.post_install_diversions = self.get_diversions()
+
# remove logrotate and it's depends
# (this is a fix for #602409 introduced by #566597
# - search for the latter bug number in this file)
@@@ -1106,7 -945,8 +1106,7 @@@
self.run(["apt-get", "clean"])
# Run custom scripts after purge all packages.
- if settings.scriptsdir is not None:
- self.run_scripts("post_purge")
+ self.run_scripts("post_purge")
# Now do a final run to see that everything worked.
self.run(["dpkg", "--purge", "--pending"])
@@@ -1114,8 -954,7 +1114,8 @@@
def save_meta_data(self):
"""Return the filesystem meta data for all objects in the chroot."""
- root = os.path.join(self.name, ".")
+ self.run(["apt-get", "clean"])
+ root = self.relative(".")
vdict = {}
proc = os.path.join(root, "proc")
for dirpath, dirnames, filenames in os.walk(root):
@@@ -1156,18 -995,16 +1156,18 @@@
def install_packages_by_name(self, packages):
if packages:
- if settings.scriptsdir is not None:
- self.run_scripts("pre_install")
+ self.run_scripts("pre_install")
- if settings.list_installed_files:
+ if settings.list_installed_files:
pre_info = self.save_meta_data()
self.run(["apt-get", "-y", "install"] + packages)
self.list_installed_files (pre_info, self.save_meta_data())
else:
self.run(["apt-get", "-y", "install"] + packages)
+ self.run_scripts("post_install")
+
+
def check_for_no_processes(self):
"""Check there are no processes running inside the chroot."""
(status, output) = run(["lsof", "-w", "+D", self.name], ignore_errors=True)
@@@ -1175,21 -1012,6 +1175,21 @@@
if count > 0:
logging.error("FAIL: Processes are running inside chroot:\n%s" %
indent_string(output))
+ for signo in [ 15, 9 ]:
+ p = subprocess.Popen(["lsof", "-t", "+D", self.name],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+ stdout, _ = p.communicate()
+ if stdout:
+ pidlist = [int(pidstr) for pidstr in stdout.split("\n") if len(pidstr)]
+ for pid in pidlist:
+ if pid > 0:
+ try:
+ if signo == 15:
+ os.kill(pid, SIGTERM)
+ else:
+ os.kill(pid, SIGKILL)
+ except OSError:
+ pass
panic()
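
check_for_no_processes() now also tries to clean up: it asks lsof for the PIDs of processes still holding files under the chroot and sends SIGTERM first, then SIGKILL. A standalone sketch of that cleanup, with a hypothetical helper name:

import os
import subprocess
from signal import SIGTERM, SIGKILL

def kill_leftover_processes(directory):
    # "lsof -t +D DIR" prints one PID per line for processes using files
    # below DIR; try SIGTERM first, SIGKILL as a last resort.
    for signo in [SIGTERM, SIGKILL]:
        p = subprocess.Popen(["lsof", "-t", "+D", directory],
                             stdout=subprocess.PIPE)
        stdout, _ = p.communicate()
        for pidstr in stdout.split("\n"):
            if pidstr.strip():
                try:
                    os.kill(int(pidstr), signo)
                except OSError:
                    pass  # the process is already gone
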
@@@ -1253,7 -1075,7 +1253,7 @@@
panic()
else:
logging.debug("No broken symlinks as far as we can find.")
-
+
def check_if_cronfiles(self, packages):
"""Check if the packages have cron files under /etc/cron.d and in case positive,
it returns the list of files. """
@@@ -1264,7 -1086,7 +1264,7 @@@
for p in packages:
basename = p + ".list"
- if not os.path.exists(os.path.join(vdir,basename)):
+ if not os.path.exists(os.path.join(vdir,basename)):
continue
f = file(os.path.join(vdir,basename), "r")
@@@ -1286,7 -1108,7 +1286,7 @@@
def check_output_cronfiles (self, list):
"""Check if a given list of cronfiles has any output. Executes
- cron file as cron would do (except for SHELL)"""
+ cron file as cron would do (except for SHELL)"""
failed = False
for vfile in list:
@@@ -1311,7 -1133,7 +1311,7 @@@
for p in packages:
basename = p + ".list"
- if not os.path.exists(os.path.join(vdir,basename)):
+ if not os.path.exists(os.path.join(vdir,basename)):
continue
f = file(os.path.join(vdir,basename), "r")
@@@ -1327,18 -1149,13 +1327,18 @@@
return has_logrotatefiles, vlist
+ def install_logrotate(self):
+ """Install logrotate for check_output_logrotatefiles, and return the
+ list of packages that were installed"""
+ old_selections = self.get_selections()
+ self.run(['apt-get', 'install', '-y', 'logrotate'])
+ diff = diff_selections(self, old_selections)
+ return diff.keys()
+
def check_output_logrotatefiles (self, list):
"""Check if a given list of logrotatefiles has any output. Executes
logrotate file as logrotate would do from cron (except for SHELL)"""
failed = False
- # XXX That's a crude hack (to fix #602409). Can't we define a set of needed packages differently?
- # It also introduces the need for hack to fix #602409 in piuparts.py
- (a,b) = self.run(['apt-get','install', '-y', 'logrotate'])
for vfile in list:
if not os.path.exists(self.relative(vfile.strip("/"))):
@@@ -1355,8 -1172,6 +1355,8 @@@
def run_scripts (self, step):
""" Run custom scripts to given step post-install|remove|purge"""
+ if settings.scriptsdir is None:
+ return
logging.info("Running scripts "+ step)
basepath = self.relative("tmp/scripts/")
if not os.path.exists(basepath):
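
install_logrotate() replaces the earlier inline `apt-get install logrotate` hack for #602409: it snapshots the package selections, installs logrotate, and returns exactly the packages that were pulled in so they can be purged again afterwards. A sketch of the same snapshot-install-diff pattern against a plain dpkg/apt system rather than a chroot (helper names are hypothetical):

import subprocess

def get_selections():
    # Parse "dpkg --get-selections" into a {package: state} dict.
    out = subprocess.Popen(["dpkg", "--get-selections", "*"],
                           stdout=subprocess.PIPE).communicate()[0]
    return dict(line.split()[:2] for line in out.split("\n") if line.strip())

def install_and_report(package):
    # Install 'package' and report every package whose selection state
    # changed, i.e. everything apt pulled in for us (needs root).
    before = get_selections()
    subprocess.check_call(["apt-get", "install", "-y", package])
    after = get_selections()
    return [name for name in after if after[name] != before.get(name)]

# install_and_report("logrotate") might return ["logrotate", "cron", ...]
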
@@@ -1692,11 -1507,13 +1692,11 @@@ def diff_meta_data(tree1, tree2)
for name1, data1 in removed[:]:
m = pat1.search(name1)
if m:
- pat2 = re.compile(r"^" + m.group(1) + r"[SK][0-9]{2}" + m.group(2) +
-r"$")
+ pat2 = re.compile(r"^" + m.group(1) + r"[SK][0-9]{2}" + m.group(2) + r"$")
for name2, data2 in new[:]:
m = pat2.search(name2)
if m:
- logging.debug("File was renamed: %s\t=> %s" % (name1,
-name2))
+ logging.debug("File was renamed: %s\t=> %s" % (name1, name2))
removed.remove((name1, data1))
new.remove((name2, data2))
# this is again special casing due to the behaviour of a single package :(
@@@ -1714,8 -1531,8 +1714,8 @@@ def file_list(meta_infos, file_owners)
vlist.append(" %s\t" % name)
if name in file_owners:
vlist.append(" owned by: %s\n" % ", ".join(file_owners[name]))
- else:
- vlist.append(" not owned\n")
+ else:
+ vlist.append(" not owned\n")
return "".join(vlist)
@@@ -1758,10 -1575,10 +1758,10 @@@ def diff_selections(chroot, selections)
return changes
-def get_package_names_from_package_files(filenames):
+def get_package_names_from_package_files(package_files):
"""Return list of package names given list of package file names."""
vlist = []
- for filename in filenames:
+ for filename in package_files:
(status, output) = run(["dpkg", "--info", filename])
for line in [line.lstrip() for line in output.split("\n")]:
if line[:len("Package:")] == "Package:":
@@@ -1811,7 -1628,7 +1811,7 @@@ def process_changes(changes)
def check_results(chroot, root_info, file_owners, deps_info=None):
"""Check that current chroot state matches 'root_info'.
-
+
If settings.warn_on_others is True and deps_info is not None, then only
print a warning rather than failing if the current chroot contains files
that are in deps_info but not in root_info. (In this case, deps_info
@@@ -1874,7 -1691,7 +1874,7 @@@
return ok
-def install_purge_test(chroot, root_info, selections, package_list, packages):
+def install_purge_test(chroot, root_info, selections, package_files, packages):
"""Do an install-purge test. Return True if successful, False if not.
Assume 'root' is a directory already populated with a working
chroot, with packages in states given by 'selections'."""
@@@ -1883,11 -1700,11 +1883,11 @@@
if settings.warn_on_others:
# Create a metapackage with dependencies from the given packages
- if package_list:
+ if package_files:
control_infos = []
# We were given package files, so let's get the Depends and
# Conflicts directly from the .debs
- for deb in package_list:
+ for deb in package_files:
returncode, output = run(["dpkg", "-f", deb])
control = deb822.Deb822(output)
control_infos.append(control)
@@@ -1898,7 -1715,7 +1898,7 @@@
apt_cache_args.extend(packages)
returncode, output = chroot.run(apt_cache_args)
control_infos = deb822.Deb822.iter_paragraphs(output.splitlines())
-
+
depends = []
conflicts = []
for control in control_infos:
@@@ -1910,7 -1727,7 +1910,7 @@@
all_conflicts = ", ".join(conflicts)
metapackage = make_metapackage("piuparts-depends-dummy",
all_depends, all_conflicts)
-
+
# Install the metapackage
chroot.install_package_files([metapackage])
# Now remove it
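
When warn_on_others is enabled, the test builds a piuparts-depends-dummy metapackage from the combined Depends and Conflicts of the packages under test, read either from the .deb files via `dpkg -f` or from `apt-cache show`. A sketch of the .deb branch, assuming python-debian is available (collect_relations is an illustrative name):

import subprocess
from debian import deb822

def collect_relations(deb_files):
    # Read the control data of each .deb and gather its Depends/Conflicts,
    # the raw material for the piuparts-depends-dummy metapackage.
    depends = []
    conflicts = []
    for deb in deb_files:
        output = subprocess.Popen(["dpkg", "-f", deb],
                                  stdout=subprocess.PIPE).communicate()[0]
        control = deb822.Deb822(output.splitlines())
        if control.get("Depends"):
            depends.append(control["Depends"])
        if control.get("Conflicts"):
            conflicts.append(control["Conflicts"])
    return ", ".join(depends), ", ".join(conflicts)

# all_depends, all_conflicts = collect_relations(["./foo_1.0_all.deb"])
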
@@@ -1925,10 -1742,11 +1925,10 @@@
else:
deps_info = None
- if package_list:
- chroot.install_package_files(package_list)
+ if package_files:
+ chroot.install_package_files(package_files)
else:
chroot.install_packages_by_name(packages)
- chroot.run(["apt-get", "clean"])
chroot.check_for_no_processes()
@@@ -1940,35 -1758,33 +1940,35 @@@
changes = diff_selections(chroot, selections)
chroot.restore_selections(changes, packages)
+ chroot.check_for_broken_diversions()
chroot.check_for_broken_symlinks()
return check_results(chroot, root_info, file_owners, deps_info=deps_info)
-def install_upgrade_test(chroot, root_info, selections, package_list, package_names):
+def install_upgrade_test(chroot, root_info, selections, package_files, packages):
"""Install package via apt-get, then upgrade from package files.
Return True if successful, False if not."""
# First install via apt-get.
- chroot.install_packages_by_name(package_names)
-
- if settings.scriptsdir is not None:
- chroot.run_scripts("pre_upgrade")
+ chroot.install_packages_by_name(packages)
+
+ chroot.run_scripts("pre_upgrade")
+ chroot.check_for_no_processes()
chroot.check_for_broken_symlinks()
# Then from the package files.
- chroot.install_package_files(package_list)
-
+ chroot.install_package_files(package_files)
+
+ chroot.check_for_no_processes()
+ chroot.check_for_broken_symlinks()
+
file_owners = chroot.get_files_owned_by_packages()
- # Remove all packages from the chroot that weren't there
- # initially.
- changes = diff_selections(chroot, selections)
- chroot.restore_selections(changes, package_names)
-
+ # Remove all packages from the chroot that weren't there initially.
+ chroot.restore_selections(selections, packages)
+
chroot.check_for_no_processes()
chroot.check_for_broken_symlinks()
@@@ -1992,7 -1808,7 +1992,7 @@@ def load_meta_data(filename)
return root_info, selections
-def install_and_upgrade_between_distros(filenames, packages):
+def install_and_upgrade_between_distros(package_files, packages):
"""Install package and upgrade it between distributions, then remove.
Return True if successful, False if not."""
@@@ -2027,23 -1843,22 +2027,23 @@@
else:
root_tgz = chroot.create_temp_tgz_file()
chroot.pack_into_tgz(root_tgz)
-
+
if settings.end_meta:
# load root_info and selections
root_info, selections = load_meta_data(settings.end_meta)
else:
chroot.upgrade_to_distros(settings.debian_distros[1:], [])
- chroot.run(["apt-get", "clean"])
+
+ chroot.check_for_no_processes()
# set root_info and selections
root_info = chroot.save_meta_data()
selections = chroot.get_selections()
-
+
if settings.save_end_meta:
# save root_info and selections
save_meta_data(settings.save_end_meta, root_info, selections)
-
+
chroot.remove()
dont_do_on_panic(cid)
chroot = get_chroot()
@@@ -2055,9 -1870,11 +2055,9 @@@
chroot.check_for_no_processes()
- chroot.run(["apt-get", "update"])
chroot.install_packages_by_name(packages)
- if settings.scriptsdir is not None:
- chroot.run_scripts("pre_upgrade")
+ chroot.run_scripts("pre_upgrade")
chroot.check_for_no_processes()
@@@ -2065,18 -1882,20 +2065,18 @@@
chroot.check_for_no_processes()
- chroot.install_package_files(filenames)
- chroot.run(["apt-get", "clean"])
-
+ chroot.install_package_files(package_files)
+
chroot.check_for_no_processes()
file_owners = chroot.get_files_owned_by_packages()
# use root_info and selections
- changes = diff_selections(chroot, selections)
- chroot.restore_selections(changes, packages)
+ chroot.restore_selections(selections, packages)
result = check_results(chroot, root_info, file_owners)
chroot.check_for_no_processes()
-
+
if root_tgz != settings.basetgz:
remove_files([root_tgz])
chroot.remove()
@@@ -2121,11 -1940,11 +2121,11 @@@ def set_basetgz_to_pbuilder(option, opt
def parse_command_line():
"""Parse the command line, change global settings, return non-options."""
-
+
parser = optparse.OptionParser(usage="%prog [options] package ...",
version="piuparts %s" % VERSION)
-
-
+
+
parser.add_option("-a", "--apt", action="store_true", default=False,
help="Command line arguments are package names " +
"to be installed via apt.")
@@@ -2134,7 -1953,7 +2134,7 @@@
metavar='CMDLINE', default=None,
help="Use CMDLINE via autopkgtest (adt-virt-*)"
" protocol instead of managing a chroot.")
-
+
parser.add_option("-b", "--basetgz", metavar="TARBALL",
help="Use TARBALL as the contents of the initial " +
"chroot, instead of building a new one with " +
@@@ -2143,7 -1962,7 +2143,7 @@@
parser.add_option("--bindmount", action="append", metavar="DIR",
default=[],
help="Directory to be bind-mounted inside the chroot.")
-
+
parser.add_option("-d", "--distribution", action="append", metavar="NAME",
help="Which Debian distribution to use: a code name " +
"(for example lenny, squeeze, sid) or experimental. The " +
@@@ -2152,26 -1971,26 +2152,31 @@@
parser.add_option("-D", "--defaults", action="store",
help="Choose which set of defaults to use "
"(debian/ubuntu).")
-
+
parser.add_option("--debfoster-options",
default="-o MaxPriority=required -o UseRecommends=no -f -n apt debfoster",
- help="Run debfoster with different parameters (default: -o MaxPriority=required -o UseRecommends=no -f -n apt debfoster).")
+ help="Run debfoster with different parameters (default: -o MaxPriority=required -o UseRecommends=no -f -n apt debfoster).")
+
+ parser.add_option("--no-eatmydata",
+ default=False,
+ action='store_true',
+ help="Default is to use libeatmydata in the chroot")
+
parser.add_option("--dpkg-noforce-unsafe-io",
default=False,
action='store_true',
- help="Default is to run dpkg with --force-unsafe-io option, which causes dpkg to skip certain file system syncs known to cause substantial performance degradation on some filesystems. This option turns that off and dpkg will use safe I/O operations.")
+ help="Default is to run dpkg with --force-unsafe-io option, which causes dpkg to skip certain file system syncs known to cause substantial performance degradation on some filesystems. This option turns that off and dpkg will use safe I/O operations.")
parser.add_option("--dpkg-force-confdef",
default=False,
action='store_true',
- help="Make dpkg use --force-confdef, which lets dpkg always choose the default action when a modified conffile is found. This option will make piuparts ignore errors it was designed to report and therefore should only be used to hide problems in depending packages. (See #466118.)")
-
+ help="Make dpkg use --force-confdef, which lets dpkg always choose the default action when a modified conffile is found. This option will make piuparts ignore errors it was designed to report and therefore should only be used to hide problems in depending packages. (See #466118.)")
+
parser.add_option("--do-not-verify-signatures", default=False,
action='store_true',
help="Do not verify signatures from the Release files when running debootstrap.")
@@@ -2180,13 -1999,13 +2185,13 @@@
default=[],
help="Add FILENAME to list of filenames to be " +
"ignored when comparing changes to chroot.")
-
+
parser.add_option("-I", "--ignore-regex", action="append",
metavar="REGEX", default=[],
help="Add REGEX to list of Perl compatible regular " +
"expressions for filenames to be " +
"ignored when comparing changes to chroot.")
-
+
parser.add_option("-k", "--keep-tmpdir",
action="store_true", default=False,
help="Don't remove the temporary directory for the " +
@@@ -2195,7 -2014,7 +2200,7 @@@
parser.add_option("-K", "--keyring", metavar="FILE",
default = "/usr/share/keyrings/debian-archive-keyring.gpg",
help="Use FILE as the keyring to use with debootstrap when creating chroots.")
-
+
parser.add_option("--keep-sources-list",
action="store_true", default=False,
help="Don't modify the chroot's " +
@@@ -2209,7 -2028,7 +2214,7 @@@
parser.add_option("--list-installed-files",
action="store_true", default=False,
help="List files added to the chroot after the " +
- "installation of the package.")
+ "installation of the package.")
parser.add_option("--lvm-volume", metavar="LVM-VOL", action="store",
help="Use LVM-VOL as source for the chroot, instead of building " +
@@@ -2219,20 -2038,16 +2224,20 @@@
parser.add_option("--lvm-snapshot-size", metavar="SNAPSHOT-SIZE", action="store",
default="1G", help="Use SNAPSHOT-SIZE as snapshot size when creating " +
"a new LVM snapshot (default: 1G)")
-
+
parser.add_option("-m", "--mirror", action="append", metavar="URL",
default=[],
help="Which Debian mirror to use.")
+ parser.add_option("--no-diversions", action="store_true",
+ default=False,
+ help="Don't check for broken diversions.")
+
parser.add_option("-n", "--no-ignores", action="callback",
callback=forget_ignores,
help="Forget all ignores set so far, including " +
"built-in ones.")
-
+
parser.add_option("-N", "--no-symlinks", action="store_true",
default=False,
help="Don't check for broken symlinks.")
@@@ -2240,8 -2055,8 +2245,8 @@@
parser.add_option("--no-upgrade-test",
action="store_true", default=False,
help="Skip testing the upgrade from an existing version " +
- "in the archive.")
-
+ "in the archive.")
+
parser.add_option("--no-install-purge-test",
action="store_true", default=False,
help="Skip install and purge test.")
@@@ -2254,13 -2069,13 +2259,13 @@@
parser.add_option("--pedantic-purge-test",
action="store_true", default=False,
help="Be pedantic when checking if a purged package leaves files behind. If this option is not set, files left in /tmp are ignored.")
-
+
parser.add_option("-s", "--save", metavar="FILENAME",
help="Save the chroot into FILENAME.")
parser.add_option("-B", "--end-meta", metavar="FILE",
help="Save chroot package selection and file meta data in FILE for later use. See the function install_and_upgrade_between_distros() in piuparts.py for defaults. Mostly useful for large scale distro upgrade tests.")
-
+
parser.add_option("-S", "--save-end-meta", metavar="FILE",
help="Load chroot package selection and file meta data from FILE. See the function install_and_upgrade_between_distros() in piuparts.py for defaults. Mostly useful for large scale distro upgrade tests.")
@@@ -2271,7 -2086,7 +2276,7 @@@
parser.add_option("--skip-cronfiles-test",
action="store_true", default=False,
help="Skip testing the output from the cron files.")
-
+
parser.add_option("--skip-logrotatefiles-test",
action="store_true", default=False,
help="Skip testing the output from the logrotate files.")
@@@ -2286,7 -2101,7 +2291,7 @@@
parser.add_option("--scriptsdir", metavar="DIR",
help="Directory where are placed the custom scripts.")
-
+
parser.add_option("-t", "--tmpdir", metavar="DIR",
help="Use DIR for temporary storage. Default is " +
"$TMPDIR or /tmp.")
@@@ -2315,7 -2130,7 +2320,7 @@@
parser.add_option("--fail-on-broken-symlinks", action="store_true",
default=False,
help="Fail if broken symlinks are detected.")
-
+
parser.add_option("--log-level", action="store",metavar='LEVEL',
default="dump",
help="Displays messages from LEVEL level, possible values are: error, info, dump, debug. The default is dump.")
@@@ -2336,8 -2151,6 +2341,8 @@@
settings.keep_sources_list = opts.keep_sources_list
settings.skip_minimize = opts.skip_minimize
settings.minimize = opts.minimize
+ if settings.minimize:
+ settings.skip_minimize = False
settings.list_installed_files = opts.list_installed_files
settings.no_install_purge_test = opts.no_install_purge_test
settings.no_upgrade_test = opts.no_upgrade_test
@@@ -2354,14 -2167,13 +2359,14 @@@
settings.pedantic_purge_test = opts.pedantic_purge_test
if not settings.pedantic_purge_test:
settings.ignored_patterns += settings.non_pedantic_ignore_patterns
-
+
log_file_name = opts.log_file
defaults = DefaultsFactory().new_defaults()
-
+
settings.debian_mirrors = [parse_mirror_spec(x, defaults.get_components())
for x in opts.mirror]
+ settings.check_broken_diversions = not opts.no_diversions
settings.check_broken_symlinks = not opts.no_symlinks
settings.warn_broken_symlinks = not opts.fail_on_broken_symlinks
settings.savetgz = opts.save
@@@ -2403,7 -2215,7 +2408,7 @@@
if opts.scriptsdir is not None:
settings.scriptsdir = opts.scriptsdir
- if not os.path.isdir(settings.scriptsdir):
+ if not os.path.isdir(settings.scriptsdir):
logging.error("Scripts directory is not a directory: %s" %
settings.scriptsdir)
panic()
@@@ -2432,7 -2244,7 +2437,7 @@@
sys.exit(exitcode)
return args
-
+
def get_chroot():
if settings.adt_virt is None: return Chroot()
@@@ -2443,10 -2255,9 +2448,10 @@@ def process_packages(package_list)
# Find the names of packages.
if settings.args_are_package_files:
packages = get_package_names_from_package_files(package_list)
+ package_files = package_list
else:
packages = package_list
- package_list = []
+ package_files = []
if len(settings.debian_distros) == 1:
chroot = get_chroot()
@@@ -2458,7 -2269,7 +2463,7 @@@
if not settings.no_install_purge_test:
if not install_purge_test(chroot, root_info, selections,
- package_list, packages):
+ package_files, packages):
logging.error("FAIL: Installation and purging test.")
panic()
logging.info("PASS: Installation and purging test.")
@@@ -2468,17 -2279,17 +2473,17 @@@
logging.info("Can't test upgrades: -a or --apt option used.")
elif not chroot.apt_get_knows(packages):
logging.info("Can't test upgrade: packages not known by apt-get.")
- elif install_upgrade_test(chroot, root_info, selections, package_list,
+ elif install_upgrade_test(chroot, root_info, selections, package_files,
packages):
logging.info("PASS: Installation, upgrade and purging tests.")
else:
logging.error("FAIL: Installation, upgrade and purging tests.")
panic()
-
+
chroot.remove()
dont_do_on_panic(cid)
else:
- if install_and_upgrade_between_distros(package_list, packages):
+ if install_and_upgrade_between_distros(package_files, packages):
logging.info("PASS: Upgrading between Debian distributions.")
else:
logging.error("FAIL: Upgrading between Debian distributions.")
@@@ -2545,5 -2356,3 +2550,5 @@@ if __name__ == "__main__"
print ''
print 'Piuparts interrupted by the user, exiting...'
sys.exit(1)
+
+# vi:set et ts=4 sw=4 :
--
piuparts git repository