[Piuparts-commits] [piuparts] 01/02: Use "autopep8 --max-line-length=160" to re-format all .py files.
Holger Levsen
holger at moszumanska.debian.org
Sun Apr 19 10:00:25 UTC 2015
This is an automated email from the git hooks/post-receive script.
holger pushed a commit to branch develop
in repository piuparts.
commit d0c97185b5b38e6196b2338f6c40477f580d22e1
Author: Holger Levsen <holger at layer-acht.org>
Date: Sun Apr 19 09:55:41 2015 +0000
Use "autopep8 --max-line-length=160" to re-format all .py files.
---
debian/changelog | 1 +
master-bin/detect_well_known_errors.py | 37 +++--
piuparts-analyze.py | 13 +-
piuparts-master-backend.py | 34 +++--
piuparts-report.py | 169 +++++++++++----------
piuparts-slave.py | 85 +++++------
piuparts.py | 265 +++++++++++++++++++--------------
piupartslib/__init__.py | 1 +
piupartslib/conf.py | 20 +--
piupartslib/dwke.py | 16 +-
piupartslib/packagesdb.py | 25 ++--
piupartslib/pkgsummary.py | 63 ++++----
tests/test_config.py | 20 +--
tests/test_pkgsummary.py | 40 ++---
14 files changed, 430 insertions(+), 359 deletions(-)
diff --git a/debian/changelog b/debian/changelog
index d684e33..fad33a1 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -61,6 +61,7 @@ piuparts (0.63) UNRELEASED; urgency=medium
- Switch links to lintian.debian.org to https.
* crontab-slave.in: try to start slave every hour to make sure it's always
running.
+ * Use "autopep8 --max-line-length=160" to re-format all .py files.
-- Andreas Beckmann <anbe at debian.org> Wed, 03 Dec 2014 20:36:30 +0100
diff --git a/master-bin/detect_well_known_errors.py b/master-bin/detect_well_known_errors.py
index f81e2eb..a3346a5 100755
--- a/master-bin/detect_well_known_errors.py
+++ b/master-bin/detect_well_known_errors.py
@@ -37,17 +37,18 @@ KPR_DIRS = ('pass', 'bugged', 'affected', 'fail')
class WKE_Config(piupartslib.conf.Config):
+
"""Configuration parameters for Well Known Errors"""
def __init__(self):
self.section = 'global'
piupartslib.conf.Config.__init__(self, self.section,
- {
- "sections": "report",
- "master-directory": ".",
- "known-problem-directory": "@sharedir@/piuparts/known_problems",
- }, "")
+ {
+ "sections": "report",
+ "master-directory": ".",
+ "known-problem-directory": "@sharedir@/piuparts/known_problems",
+ }, "")
def setup_logging(log_level):
@@ -61,9 +62,11 @@ def write_file(filename, contents):
with file(filename, "w") as f:
f.write(contents)
+
def mtime(path):
return os.path.getmtime(path)
+
def clean_cache_files(logdict, cachedict, recheck=False, recheck_failed=False,
skipnewer=False):
"""Delete files in cachedict if the corresponding logdict file is missing
@@ -73,10 +76,10 @@ def clean_cache_files(logdict, cachedict, recheck=False, recheck_failed=False,
for pkgspec in cachedict:
try:
if pkgspec not in logdict \
- or (mtime(logdict[pkgspec])>mtime(cachedict[pkgspec]) and not skipnewer)\
- or get_where(logdict[pkgspec]) != get_where(cachedict[pkgspec])\
- or recheck\
- or (recheck_failed and not get_where(cachedict[pkgspec]) in ['pass']):
+ or (mtime(logdict[pkgspec]) > mtime(cachedict[pkgspec]) and not skipnewer)\
+ or get_where(logdict[pkgspec]) != get_where(cachedict[pkgspec])\
+ or recheck\
+ or (recheck_failed and not get_where(cachedict[pkgspec]) in ['pass']):
os.remove(cachedict[pkgspec])
count = count + 1
except (IOError, OSError):
@@ -85,6 +88,7 @@ def clean_cache_files(logdict, cachedict, recheck=False, recheck_failed=False,
return count
+
def make_kprs(logdict, kprdict, problem_list):
"""Create kpr files, as necessary, so every log file has one
kpr entries are e.g.
@@ -144,6 +148,7 @@ def process_section(section, config, problem_list,
return (del_cnt, add_cnt, failures)
+
def detect_well_known_errors(sections, config, problem_list, recheck, recheck_failed):
for section in sections:
@@ -152,8 +157,8 @@ def detect_well_known_errors(sections, config, problem_list, recheck, recheck_fa
logging.info("%s:" % section)
(del_cnt, add_cnt, failures) = \
- process_section(section, config, problem_list,
- recheck, recheck_failed)
+ process_section(section, config, problem_list,
+ recheck, recheck_failed)
logging.info("parsed logfiles: %d removed, %d added" % (del_cnt, add_cnt))
@@ -171,21 +176,21 @@ if __name__ == '__main__':
setup_logging(logging.DEBUG)
parser = argparse.ArgumentParser(
- description="Detect well known errors",
+ description="Detect well known errors",
epilog="""
This script processes all log files against defined "known_problem" files,
caching the problems found, by package, into ".kpr" files.
""")
parser.add_argument('sections', nargs='*', metavar='SECTION',
- help="limit processing to the listed SECTION(s)")
+ help="limit processing to the listed SECTION(s)")
parser.add_argument('--recheck', dest='recheck', action='store_true',
- help="recheck all log files (delete cache)")
+ help="recheck all log files (delete cache)")
parser.add_argument('--recheck-failed', dest='recheck_failed',
- action='store_true',
- help="recheck failed log files (delete cache)")
+ action='store_true',
+ help="recheck failed log files (delete cache)")
args = parser.parse_args()
diff --git a/piuparts-analyze.py b/piuparts-analyze.py
index 9420f65..a7afa7b 100644
--- a/piuparts-analyze.py
+++ b/piuparts-analyze.py
@@ -169,7 +169,7 @@ def mark_bugged_version(failed_log, bugged_log):
def bts_update_found(bugnr, newversion):
if "DEBEMAIL" in os.environ and os.environ["DEBEMAIL"]:
- #subprocess.check_call(('bts', 'found', bugnr, newversion))
+ # subprocess.check_call(('bts', 'found', bugnr, newversion))
print(' '.join(('bts', 'found', str(bugnr), newversion)))
@@ -194,9 +194,9 @@ def mark_logs_with_reported_bugs():
moved = True
break
for bug_version in found_versions:
- #print('DEBUG: %s/%s #%d %s' % (pname, pversion, bug, bug_version))
+ # print('DEBUG: %s/%s #%d %s' % (pname, pversion, bug, bug_version))
- if apt_pkg.version_compare(pversion, bug_version) > 0: # pversion > bug_version
+ if apt_pkg.version_compare(pversion, bug_version) > 0: # pversion > bug_version
bugged_logs = find_bugged_logs(failed_log)
if not bugged_logs and not moved:
print('%s/%s: Maybe the bug was filed earlier: https://bugs.debian.org/%d against %s/%s'
@@ -205,9 +205,9 @@ def mark_logs_with_reported_bugs():
for bugged_log in bugged_logs:
old_pversion = package_source_version(bugged_log)
bugged_errors = extract_errors(bugged_log)
- if (apt_pkg.version_compare(old_pversion, bug_version) == 0 # old_pversion == bug_version
+ if (apt_pkg.version_compare(old_pversion, bug_version) == 0 # old_pversion == bug_version
and
- failed_errors == bugged_errors):
+ failed_errors == bugged_errors):
# a bug was filed for an old version of the package,
# and the errors were the same back then - assume it is the same bug.
if not moved:
@@ -243,6 +243,8 @@ def report_packages_with_many_logs():
piuparts_usertags_cache = None
+
+
def all_piuparts_bugs():
global piuparts_usertags_cache
if piuparts_usertags_cache is None:
@@ -255,6 +257,7 @@ def piuparts_bugs_in(package):
bugs.sort(reverse=True)
return bugs
+
def piuparts_bugs_affecting(package):
bugs = debianbts.get_bugs('affects', package, 'bugs', all_piuparts_bugs(), 'archive', 'both')
bugs.sort(reverse=True)
diff --git a/piuparts-master-backend.py b/piuparts-master-backend.py
index 0613c06..2d0bbb0 100644
--- a/piuparts-master-backend.py
+++ b/piuparts-master-backend.py
@@ -44,10 +44,11 @@ DISTRO_CONFIG_FILE = "/etc/piuparts/distros.conf"
log_handler = None
+
def setup_logging(log_level, log_file_name):
logger = logging.getLogger()
- global log_handler;
+ global log_handler
logger.removeHandler(log_handler)
if log_file_name:
@@ -58,6 +59,7 @@ def setup_logging(log_level, log_file_name):
logger.addHandler(log_handler)
logger.setLevel(log_level)
+
def timestamp():
return time.strftime("[%Y-%m-%d %H:%M:%S]")
@@ -66,18 +68,18 @@ class Config(piupartslib.conf.Config):
def __init__(self, section="master", defaults_section=None):
piupartslib.conf.Config.__init__(self, section,
- {
- "log-file": None,
- "master-directory": ".",
- "proxy": None,
- "mirror": None,
- "distro": None,
- "area": None,
- "arch": None,
- "upgrade-test-distros": None,
- "depends-sections": None,
- },
- defaults_section=defaults_section)
+ {
+ "log-file": None,
+ "master-directory": ".",
+ "proxy": None,
+ "mirror": None,
+ "distro": None,
+ "area": None,
+ "arch": None,
+ "upgrade-test-distros": None,
+ "depends-sections": None,
+ },
+ defaults_section=defaults_section)
class CommandSyntaxError(Exception):
@@ -217,8 +219,8 @@ class Master(Protocol):
self._load_package_database(dep)
db.set_dependency_databases([self._package_databases[dep] for dep in deps])
db.load_packages_urls(
- distro_config.get_packages_urls(
- config.get_distro(),
+ distro_config.get_packages_urls(
+ config.get_distro(),
config.get_area(),
config.get_arch()))
if config.get_distro() != config.get_final_distro():
@@ -253,7 +255,6 @@ class Master(Protocol):
return 0 # stamp outdated
return ttl + random.randrange(120)
-
def do_transaction(self):
line = self._readline()
if line:
@@ -274,6 +275,7 @@ class Master(Protocol):
if len(args) != count:
raise CommandSyntaxError("Need exactly %d args: %s %s" %
(count, command, " ".join(args)))
+
def dump_pkgs(self):
for st in self._binary_db.get_states():
for name in self._binary_db.get_pkg_names_in_state(st):
diff --git a/piuparts-report.py b/piuparts-report.py
index 9e9d3b0..b7876fa 100644
--- a/piuparts-report.py
+++ b/piuparts-report.py
@@ -353,7 +353,7 @@ ANALYSIS_BODY_TEMPLATE = """
"""
PROB_TPL = \
-"""<tr class="titlerow"><td class="titlecell">
+ """<tr class="titlerow"><td class="titlecell">
$HEADER in $SECTION, sorted by reverse dependency count.
</td></tr><tr class="normalrow"><td class="contentcell2">
$HELPTEXT
@@ -367,7 +367,7 @@ $PACKAGE_LIST</ul>
"""
PKG_ERROR_TPL = \
-"""<li>$RDEPS - <a href=\"$LOG\">$LOG</a>
+ """<li>$RDEPS - <a href=\"$LOG\">$LOG</a>
(<a href=\"https://tracker.debian.org/pkg/$SPKG\" target=\"_blank\">PTS</a>)
(<a href=\"https://bugs.debian.org/$PACKAGE?dist=unstable\" target=\"_blank\">BTS</a>)
$BUG</li>
@@ -538,12 +538,13 @@ def html_protect(vstr):
vstr = "'".join(vstr.split("'"))
return vstr
+
def is_bad_state(state):
bad_states = [
#"successfully-tested",
"failed-testing",
"cannot-be-tested",
- #"essential-required", # obsolete
+ # "essential-required", # obsolete
#"waiting-to-be-tested",
#"waiting-for-dependency-to-be-tested",
"dependency-failed-testing",
@@ -558,19 +559,22 @@ def is_bad_state(state):
return(state in bad_states)
+
def emphasize_reason(reason):
if is_bad_state(reason):
- reason = "<em>"+reason+"</em>"
+ reason = "<em>" + reason + "</em>"
return reason
+
def source_subdir(source):
if source[:3] == "lib":
return source[:4]
else:
return source[:1]
+
def source_summary_url(web_host, doc_root, section, src_pkg):
- return( "https://%s%s/%s/source/%s/%s.html" %
+ return("https://%s%s/%s/source/%s/%s.html" %
(
web_host,
doc_root,
@@ -578,7 +582,8 @@ def source_summary_url(web_host, doc_root, section, src_pkg):
source_subdir(src_pkg),
src_pkg,
)
- )
+ )
+
def maintainer_subdir(maintainer):
return maintainer.lower()[:1]
@@ -597,6 +602,7 @@ def find_files_with_suffix(vdir, suffix):
# sort by mtime
return [x[1] for x in sorted(pairs)]
+
def update_file(source, target):
if os.path.exists(target):
try:
@@ -617,7 +623,7 @@ def update_file(source, target):
try:
shutil.copyfile(source, target)
except IOError as (errno, strerror):
- logging.error("failed to copy %s to %s: I/O error(%d): %s" \
+ logging.error("failed to copy %s to %s: I/O error(%d): %s"
% (source, target, errno, strerror))
@@ -631,6 +637,7 @@ def copy_logs(logs_by_dir, output_dir):
target = os.path.join(fulldir, basename)
update_file(source, target)
+
def remove_old_logs(logs_by_dir, output_dir):
for vdir in logs_by_dir:
fulldir = os.path.join(output_dir, vdir)
@@ -657,6 +664,7 @@ def append_file(filename, contents):
f.write(contents)
f.close()
+
def read_file(filename):
f = file(filename, "r")
l = f.readlines()
@@ -673,13 +681,13 @@ def write_template_html(filename, body, mapping={}):
"content_md5": "",
"piuparts_version": "",
"time": "",
- })
+ })
content_md5 = hashlib.md5(htmlpage.safe_substitute(mapping)).hexdigest()
mapping.update({
"content_md5": content_md5,
"piuparts_version": PIUPARTS_VERSION,
"time": time.strftime("%Y-%m-%d %H:%M %Z"),
- })
+ })
write_file(filename, htmlpage.safe_substitute(mapping))
@@ -687,13 +695,14 @@ def create_section_navigation(section_names, current_section, doc_root):
tablerows = ""
for section in section_names:
tablerows += ("<tr class=\"normalrow\"><td class=\"contentcell\"><a href='%s/%s'>%s</a></td></tr>\n") % \
- (doc_root, html_protect(section), html_protect(section))
+ (doc_root, html_protect(section), html_protect(section))
tablerows += "<tr><td class=\"contentcell\"><a href=\"%s/%s/maintainer/\">by maintainer / uploader</a></td></tr>\n" \
% (doc_root, current_section)
tablerows += "<tr><td class=\"contentcell\"><a href=\"%s/%s/source/\">by source package</a></td></tr>\n" \
% (doc_root, current_section)
return tablerows;
+
def get_email_address(maintainer):
email = "INVALID maintainer address: %s" % (maintainer)
try:
@@ -727,11 +736,11 @@ class Section:
self._config = Config(section=section, defaults_section="global")
self._config.read(CONFIG_FILE)
self._distro_config = piupartslib.conf.DistroConfig(
- DISTRO_CONFIG_FILE, self._config["mirror"])
+ DISTRO_CONFIG_FILE, self._config["mirror"])
logging.debug("-------------------------------------------")
logging.debug("Running section " + self._config.section)
- self._section_directory = os.path.abspath(os.path.join(master_directory, \
+ self._section_directory = os.path.abspath(os.path.join(master_directory,
self._config.section))
if not os.path.exists(self._section_directory):
logging.debug("Warning: %s did not exist, now created. Did you ever let the slave work?"
@@ -748,8 +757,8 @@ class Section:
self._source_db = piupartslib.packagesdb.PackagesDB(prefix=self._section_directory)
self._source_db.load_packages_urls(
- self._distro_config.get_sources_urls(
- self._config.get_distro(),
+ self._distro_config.get_sources_urls(
+ self._config.get_distro(),
self._config.get_area()))
if self._config.get_distro() != self._config.get_final_distro():
# take version numbers (or None) from final distro
@@ -785,8 +794,8 @@ class Section:
# only cache the big base databases that don't have additional dependencies
self._packagedb_cache[section] = db
db.load_packages_urls(
- self._distro_config.get_packages_urls(
- config.get_distro(),
+ self._distro_config.get_packages_urls(
+ config.get_distro(),
config.get_area(),
config.get_arch()))
if config.get_distro() != config.get_final_distro():
@@ -803,10 +812,9 @@ class Section:
"section_navigation": self._section_navigation,
"doc_root": self._doc_root,
"section": html_protect(self._config.section),
- })
+ })
write_template_html(filename, body, mapping)
-
def write_log_list_page(self, filename, title, preface, logs):
packages = {}
for pathname, package, version in logs:
@@ -829,15 +837,15 @@ class Section:
lines.append(line)
if "FAIL" in preface:
- title_style="alerttitlecell"
+ title_style = "alerttitlecell"
else:
- title_style="titlecell"
+ title_style = "titlecell"
self._write_template_html(
- filename,
+ filename,
LOG_LIST_BODY_TEMPLATE,
{
- "page_title": html_protect(title+" in "+self._config.section),
+ "page_title": html_protect(title + " in " + self._config.section),
"title": html_protect(title),
"title_style": title_style,
"preface": preface,
@@ -846,7 +854,6 @@ class Section:
"logrows": "".join(lines),
})
-
def print_by_dir(self, output_directory, logs_by_dir):
for vdir in logs_by_dir:
vlist = []
@@ -886,16 +893,16 @@ class Section:
if package_name in self._log_name_cache[vdir]:
basename = package_name \
- + "_" \
+ + "_" \
+ self._log_name_cache[vdir][package_name] \
+ ".log"
links.append("<a href=\"%s/%s\"%s>%s</a>" % (
- self._doc_root,
+ self._doc_root,
os.path.join(self._config.section, vdir, basename),
style,
html_protect(self._log_name_cache[vdir][package_name]),
- ))
+ ))
return links
@@ -914,9 +921,9 @@ class Section:
def link_to_source_summary(self, package_name):
source_name = self._binary_db.get_source(package_name)
link = "<a href=\"%s/%s/source/%s\">%s</a>" % (
- self._doc_root,
+ self._doc_root,
self._config.section,
- source_subdir(source_name)+"/"+source_name+".html",
+ source_subdir(source_name) + "/" + source_name + ".html",
html_protect(package_name))
return link
@@ -926,7 +933,7 @@ class Section:
link = "<a href=\"%s/%s/%s\">%s</a>" % (
self._doc_root,
section,
- "state-"+state+".html"+"#"+package_name,
+ "state-" + state + ".html" + "#" + package_name,
link_target)
else:
if link_target == package_name:
@@ -948,7 +955,7 @@ class Section:
dirs = ["untestable"]
if dirs != "":
- links = self.find_links_to_logs (package_name, dirs, logs_by_dir)
+ links = self.find_links_to_logs(package_name, dirs, logs_by_dir)
link = ", ".join(links)
if "/bugged/" in link or "/affected/" in link:
@@ -990,7 +997,6 @@ class Section:
return total
-
def create_maintainer_summaries(self, maintainers, source_data):
logging.debug("Writing %d maintainer summaries in %s" % (len(maintainers), self._output_directory))
maintainer_dir = os.path.join(self._output_directory, "maintainer")
@@ -1046,19 +1052,19 @@ class Section:
distrolinks += "</td></tr>"
self._write_template_html(
- os.path.join(maintainer_subdir_path, maintainer + ".html"),
+ os.path.join(maintainer_subdir_path, maintainer + ".html"),
MAINTAINER_BODY_TEMPLATE,
{
- "page_title": html_protect("Status of " \
- + maintainer \
- + " packages in " \
+ "page_title": html_protect("Status of "
+ + maintainer
+ + " packages in "
+ self._config.section),
- "maintainer": html_protect(maintainer+" in "+self._config.section),
- "distrolinks": distrolinks,
- "rows": rows + "".join([package_rows[state] for state in states]),
+ "maintainer": html_protect(maintainer + " in " + self._config.section),
+ "distrolinks": distrolinks,
+ "rows": rows + "".join([package_rows[state] for state in states]),
})
- def create_source_summary (self, source, logs_by_dir):
+ def create_source_summary(self, source, logs_by_dir):
source_version = self._source_db.get_control_header(source, "Version")
binaries = self._source_db.get_control_header(source, "Binary")
maintainer = self._source_db.get_control_header(source, "Maintainer")
@@ -1082,11 +1088,11 @@ class Section:
state = self._binary_db.get_package_state(binary)
if not "waiting" in state and "dependency" in state:
- state_style="lightalertlabelcell"
+ state_style = "lightalertlabelcell"
elif state == "failed-testing":
- state_style="lightlabelcell"
+ state_style = "lightlabelcell"
else:
- state_style="labelcell"
+ state_style = "labelcell"
binary_version = self._binary_db.get_control_header(binary, "Version")
binaryrows += "<tr class=\"normalrow\">" \
@@ -1109,10 +1115,12 @@ class Section:
failed = True
if binaryrows != "":
- source_state="unknown"
+ source_state = "unknown"
- if success: source_state="<img src=\"%s/images/sunny.png\" alt=\"success\">" % self._doc_root
- if failed: source_state="<img src=\"%s/images/weather-severe-alert.png\" alt=\"failed\">" % self._doc_root
+ if success:
+ source_state = "<img src=\"%s/images/sunny.png\" alt=\"success\">" % self._doc_root
+ if failed:
+ source_state = "<img src=\"%s/images/weather-severe-alert.png\" alt=\"failed\">" % self._doc_root
sourcerows = "<tr class=\"titlerow\">" \
+ "<td class=\"titlecell\" colspan=\"6\" id=\"%s\">%s in %s</td>" \
@@ -1157,20 +1165,21 @@ class Section:
os.path.join(source_summary_page_path, (source + ".html")),
SOURCE_PACKAGE_BODY_TEMPLATE,
{
- "page_title": html_protect("Status of source package "+source+" in "+self._config.section),
- "rows": sourcerows+binaryrows,
+ "page_title": html_protect("Status of source package " + source + " in " + self._config.section),
+ "rows": sourcerows + binaryrows,
})
# return parsable values
- if success: source_state = "pass"
- if failed: source_state = "fail"
+ if success:
+ source_state = "pass"
+ if failed:
+ source_state = "fail"
else:
source_state = "udeb"
sourcerows = ""
return sourcerows, binaryrows, source_state, maintainer, uploaders
-
def create_package_summaries(self, logs_by_dir):
logging.debug("Writing source summaries in %s" % self._config.section)
@@ -1206,17 +1215,16 @@ class Section:
self.create_maintainer_summaries(maintainers, source_binary_rows)
-
def make_section_stats_graph(self):
countsfile = os.path.join(self._section_directory, "counts.txt")
pngfile = os.path.join(self._output_directory, "states.png")
grdevices = importr('grDevices')
grdevices.png(file=pngfile, width=1600, height=900, pointsize=10, res=100, antialias="none")
r = robjects.r
- r('t <- (read.table("'+countsfile+'",sep=",",header=1,row.names=1))')
+ r('t <- (read.table("' + countsfile + '",sep=",",header=1,row.names=1))')
r('cname <- c("date",rep(colnames(t)))')
# here we define how many days we wants stats for (163=half a year)
- #r('v <- t[(nrow(t)-163):nrow(t),0:12]')
+ # r('v <- t[(nrow(t)-163):nrow(t),0:12]')
# make graph since day 1
r('v <- t[0:nrow(t),0:12]')
# thanks to http://tango.freedesktop.org/Generic_Icon_Theme_Guidelines for those nice colors
@@ -1224,7 +1232,7 @@ class Section:
"#fce94f", "#a40000", "#888a85", "#2e3436", "#729fcf", \
"#3465a4", "#204a87", "#555753"))')
r('barplot(t(v),col = 1:13, \
- main="Binary packages per state in '+self._config.section+'", \
+ main="Binary packages per state in ' + self._config.section + '", \
xlab="", ylab="Number of binary packages", space=0, border=NA)')
r('legend(x="bottom",legend=colnames(t), ncol=2,fill=1:13,xjust=0.5,yjust=0,bty="n")')
grdevices.dev_off()
@@ -1241,13 +1249,12 @@ class Section:
return stats_html
-
def create_and_link_to_analysises(self, state):
- link="<ul>\n"
+ link = "<ul>\n"
for template, linktarget in linktarget_by_template:
# successful logs only have issues and failed logs only have errors
if (state == "failed-testing" and template[-9:] != "issue.tpl") \
- or (state == "successfully-tested" and template[-9:] == "issue.tpl"):
+ or (state == "successfully-tested" and template[-9:] == "issue.tpl"):
substats = ""
tpl = os.path.join(self._output_directory, template)
@@ -1258,11 +1265,11 @@ class Section:
os.unlink(tpl)
self._write_template_html(
- os.path.join(self._output_directory, template[:-len(".tpl")]+".html"),
+ os.path.join(self._output_directory, template[:-len(".tpl")] + ".html"),
ANALYSIS_BODY_TEMPLATE,
{
- "page_title": html_protect("Packages in state "+state+" "+linktarget),
- "rows": rows,
+ "page_title": html_protect("Packages in state " + state + " " + linktarget),
+ "rows": rows,
})
if state == "failed-testing":
count_bugged = string.count(rows, '"bugged/')
@@ -1283,7 +1290,7 @@ class Section:
substats += ": %s passed" % count_passed
link += "<li><a href=%s>%s</a>%s</li>\n" % \
(
- template[:-len(".tpl")]+".html",
+ template[:-len(".tpl")] + ".html",
linktarget,
substats,
)
@@ -1340,10 +1347,10 @@ class Section:
os.path.join(self._output_directory, "index.html"),
SECTION_INDEX_BODY_TEMPLATE,
{
- "page_title": html_protect(self._config.section+" statistics"),
- "description": html_protect(description),
- "tablerows": tablerows,
- "packagesurl": "<br>".join([html_protect(url) for url in self._binary_db.get_urls()]),
+ "page_title": html_protect(self._config.section + " statistics"),
+ "description": html_protect(description),
+ "tablerows": tablerows,
+ "packagesurl": "<br>".join([html_protect(url) for url in self._binary_db.get_urls()]),
})
def _show_providers(self, dep):
@@ -1380,7 +1387,7 @@ class Section:
package2id(package["Package"]),
self.link_to_source_summary(package["Package"]))
if with_counts:
- vlist += " (%d, %d)" % (self._binary_db.rrdep_count(package["Package"]), \
+ vlist += " (%d, %d)" % (self._binary_db.rrdep_count(package["Package"]),
self._binary_db.block_count(package["Package"]))
vlist += " (%s)" % html_protect(package["Maintainer"])
all_deps = unique(package.all_dependencies())
@@ -1409,20 +1416,18 @@ class Section:
os.path.join(self._output_directory, "state-%s.html" % state),
STATE_BODY_TEMPLATE,
{
- "page_title": html_protect("Packages in state "+state+" in "+self._config.section),
+ "page_title": html_protect("Packages in state " + state + " in " + self._config.section),
"state": html_protect(state),
"list": vlist,
"aside": aside,
})
-
def archive_logfile(self, vdir, log):
archivedir = os.path.join("archive", vdir)
if not os.path.exists(archivedir):
os.makedirs(archivedir)
os.rename(os.path.join(vdir, log), os.path.join("archive", vdir, log))
-
def cleanup_removed_packages(self, logs_by_dir):
vdirs = logs_by_dir.keys()
vdirs.remove("reserved")
@@ -1442,7 +1447,6 @@ class Section:
self.archive_logfile(vdir, log)
logs_by_dir[vdir].remove(log)
-
def generate_html(self):
logging.debug("Finding log files")
dirs = ["pass", "fail", "bugged", "affected", "reserved", "untestable"]
@@ -1531,6 +1535,7 @@ class Section:
self.generate_summary(web_host)
+
def sections_by_precedence(sections):
precedence = {}
count = 0
@@ -1542,6 +1547,7 @@ def sections_by_precedence(sections):
return sorted(sections, key=lambda x: precedence[x])
+
def generate_global_summary(dir, sections):
json_name = "summary.json"
@@ -1550,16 +1556,17 @@ def generate_global_summary(dir, sections):
summary = pkgsummary.new_summary()
for section in sections_by_precedence(sections):
- sec_path = os.path.join(dir, section, json_name)
- if os.path.isfile(sec_path):
- sec_summ = pkgsummary.read_summary(sec_path)
- summary = pkgsummary.merge_summary(summary, sec_summ)
+ sec_path = os.path.join(dir, section, json_name)
+ if os.path.isfile(sec_path):
+ sec_summ = pkgsummary.read_summary(sec_path)
+ summary = pkgsummary.merge_summary(summary, sec_summ)
summary_path = os.path.join(dir, json_name)
pkgsummary.write_summary(summary, summary_path)
# START detect_well_known_errors
+
def get_bug_text(logpath):
bugpath = replace_ext(logpath, BUG_EXT)
@@ -1571,6 +1578,7 @@ def get_bug_text(logpath):
return txt
+
def populate_tpl(tmpl, vals):
for key in vals:
@@ -1578,6 +1586,7 @@ def populate_tpl(tmpl, vals):
return tmpl
+
def update_tpl(basedir, section, problem, failures, logdict, ftpl, ptpl, pkgsdb):
pkg_text = ""
@@ -1598,7 +1607,7 @@ def update_tpl(basedir, section, problem, failures, logdict, ftpl, ptpl, pkgsdb)
'PACKAGE': bin_pkg,
'BUG': get_bug_text(log),
'RDEPS': rdep_cnt,
- 'SPKG':src_pkg,
+ 'SPKG': src_pkg,
})
if len(pkg_text):
@@ -1609,9 +1618,10 @@ def update_tpl(basedir, section, problem, failures, logdict, ftpl, ptpl, pkgsdb)
'COMMAND': problem.get_command(),
'PACKAGE_LIST': pkg_text,
'COUNT': len(failures),
- })
+ })
return ""
+
def update_html(section, html_dir, logdict, problem_list, failures, pkgsdb):
for problem in problem_list:
tpl_text = update_tpl(html_dir, section, problem,
@@ -1622,6 +1632,7 @@ def update_html(section, html_dir, logdict, problem_list, failures, pkgsdb):
with open(os.path.join(html_dir, problem.name[:-5] + TPL_EXT), 'w') as pf:
pf.write(tpl_text)
+
def dwke_process_section(section, sectiondir, htmldir, problem_list, pkgsdb):
workdirs = [os.path.join(sectiondir, x) for x in KPR_DIRS]
@@ -1641,7 +1652,7 @@ def make_bts_stats_graph(master_dir, out_dir):
grdevices = importr('grDevices')
grdevices.png(file=pngfile, width=1600, height=900, pointsize=10, res=100)
r = robjects.r
- r('t <- (read.table("'+countsfile+'",sep=",",header=1,row.names=1))')
+ r('t <- (read.table("' + countsfile + '",sep=",",header=1,row.names=1))')
r('cname <- c("date",rep(colnames(t)))')
# make graph since day 1
r('v <- t[0:nrow(t),0:4]')
@@ -1689,10 +1700,10 @@ def main():
# static pages
logging.debug("Writing static pages")
for page in ("index", "bug_howto"):
- tpl = os.path.join(output_directory, page+".tpl")
+ tpl = os.path.join(output_directory, page + ".tpl")
INDEX_BODY = "".join(read_file(tpl))
write_template_html(
- os.path.join(output_directory, page+".html"),
+ os.path.join(output_directory, page + ".html"),
INDEX_BODY,
{
"page_title": "About piuparts.debian.org and News",
diff --git a/piuparts-slave.py b/piuparts-slave.py
index 22ee204..0bb7faf 100644
--- a/piuparts-slave.py
+++ b/piuparts-slave.py
@@ -46,12 +46,13 @@ apt_pkg.init_system()
CONFIG_FILE = "/etc/piuparts/piuparts.conf"
DISTRO_CONFIG_FILE = "/etc/piuparts/distros.conf"
-MAX_WAIT_TEST_RUN = 45*60
+MAX_WAIT_TEST_RUN = 45 * 60
interrupted = False
old_sigint_handler = None
got_sighup = False
+
def setup_logging(log_level, log_file_name):
logger = logging.getLogger()
logger.setLevel(log_level)
@@ -73,41 +74,43 @@ class Config(piupartslib.conf.Config):
def __init__(self, section="slave", defaults_section=None):
self.section = section
piupartslib.conf.Config.__init__(self, section,
- {
- "sections": "slave",
- "idle-sleep": 300,
- "max-tgz-age": 2592000,
- "min-tgz-retry-delay": 21600,
- "master-host": None,
- "master-user": None,
- "master-command": None,
- "proxy": None,
- "mirror": None,
- "piuparts-command": "sudo piuparts",
- "piuparts-flags": "",
- "tmpdir": None,
- "distro": None,
- "area": None,
- "components": None,
- "chroot-tgz": None,
- "upgrade-test-distros": None,
- "basetgz-directory": ".",
- "max-reserved": 1,
- "debug": "no",
- "keep-sources-list": "no",
- "arch": None,
- "precedence": "1",
- "slave-load-max": None,
- },
- defaults_section=defaults_section)
+ {
+ "sections": "slave",
+ "idle-sleep": 300,
+ "max-tgz-age": 2592000,
+ "min-tgz-retry-delay": 21600,
+ "master-host": None,
+ "master-user": None,
+ "master-command": None,
+ "proxy": None,
+ "mirror": None,
+ "piuparts-command": "sudo piuparts",
+ "piuparts-flags": "",
+ "tmpdir": None,
+ "distro": None,
+ "area": None,
+ "components": None,
+ "chroot-tgz": None,
+ "upgrade-test-distros": None,
+ "basetgz-directory": ".",
+ "max-reserved": 1,
+ "debug": "no",
+ "keep-sources-list": "no",
+ "arch": None,
+ "precedence": "1",
+ "slave-load-max": None,
+ },
+ defaults_section=defaults_section)
class Alarm(Exception):
pass
+
def alarm_handler(signum, frame):
raise Alarm
+
def sigint_handler(signum, frame):
global interrupted
interrupted = True
@@ -115,6 +118,7 @@ def sigint_handler(signum, frame):
print 'Press Ctrl-C again to abort now.'
signal(SIGINT, old_sigint_handler)
+
def sighup_handler(signum, frame):
global got_sighup
got_sighup = True
@@ -341,7 +345,7 @@ class Section:
self._config = Config(section=section, defaults_section="global")
self._config.read(CONFIG_FILE)
self._distro_config = piupartslib.conf.DistroConfig(
- DISTRO_CONFIG_FILE, self._config["mirror"])
+ DISTRO_CONFIG_FILE, self._config["mirror"])
self._error_wait_until = 0
self._idle_wait_until = 0
self._recycle_wait_until = 0
@@ -363,7 +367,6 @@ class Section:
self._slave = slave or Slave()
-
def _throttle_if_overloaded(self):
global interrupted
if interrupted or got_sighup:
@@ -392,7 +395,6 @@ class Section:
if secs < 300:
secs += random.randrange(30, 90)
-
def _connect_to_master(self, recycle=False):
self._slave.set_master_host(self._config["master-host"])
self._slave.set_master_user(self._config["master-user"])
@@ -402,11 +404,10 @@ class Section:
if recycle:
self._slave.enable_recycling()
-
def _get_tarball(self):
basetgz = self._config["chroot-tgz"] or \
- self._distro_config.get_basetgz(self._config.get_start_distro(),
- self._config.get_arch())
+ self._distro_config.get_basetgz(self._config.get_start_distro(),
+ self._config.get_arch())
return os.path.join(self._config["basetgz-directory"], basetgz)
def _check_tarball(self):
@@ -453,7 +454,6 @@ class Section:
return max(self._error_wait_until, self._recycle_wait_until)
return max(self._error_wait_until, self._idle_wait_until)
-
def run(self, do_processing=True, recycle=False):
if time.time() < self.sleep_until(recycle=recycle):
return 0
@@ -472,7 +472,7 @@ class Section:
action = "Recycling"
if not do_processing:
action = "Flushing"
- logging.info("%s section %s (precedence=%d)" \
+ logging.info("%s section %s (precedence=%d)"
% (action, self._config.section, self.precedence()))
self._config = Config(section=self._config.section, defaults_section="global")
@@ -483,7 +483,7 @@ class Section:
self._error_wait_until = time.time() + 3600
return 0
self._distro_config = piupartslib.conf.DistroConfig(
- DISTRO_CONFIG_FILE, self._config["mirror"])
+ DISTRO_CONFIG_FILE, self._config["mirror"])
if int(self._config["max-reserved"]) == 0:
logging.info("disabled")
@@ -525,7 +525,6 @@ class Section:
os.chdir(oldcwd)
return 0
-
def _talk_to_master(self, fetch=False, unreserve=False, recycle=False):
flush = self._count_submittable_logs() > 0
fetch = fetch and not self._slave.get_reserved()
@@ -589,7 +588,6 @@ class Section:
return True
return False
-
def _process(self):
global interrupted
self._slave.close()
@@ -601,8 +599,8 @@ class Section:
try:
pf = piupartslib.packagesdb.PackagesFile()
pf.load_packages_urls(
- self._distro_config.get_packages_urls(
- distro,
+ self._distro_config.get_packages_urls(
+ distro,
self._config.get_area(),
self._config.get_arch()),
packagenames)
@@ -632,7 +630,6 @@ class Section:
self._talk_to_master(unreserve=interrupted)
return test_count
-
def _test_package(self, pname, pvers, packages_files):
global old_sigint_handler
old_sigint_handler = signal(SIGINT, sigint_handler)
@@ -745,7 +742,7 @@ class Section:
output.write(" *** Process KILLED - exceed maximum run time ***\n")
elif not "piuparts run ends" in lastline:
ret += 1024
- output.write(" *** PIUPARTS OUTPUT INCOMPLETE ***\n");
+ output.write(" *** PIUPARTS OUTPUT INCOMPLETE ***\n")
output.write("\n")
output.write("ret=%d\n" % ret)
@@ -773,7 +770,7 @@ def run_test_with_timeout(cmd, maxwait, kill_all=True):
pids = [p.pid]
if kill_all:
ps = subprocess.Popen(["ps", "--no-headers", "-o", "pid", "--ppid", "%d" % p.pid],
- stdout = subprocess.PIPE)
+ stdout=subprocess.PIPE)
stdout, stderr = ps.communicate()
pids.extend([int(pid) for pid in stdout.split()])
if p.poll() is None:
diff --git a/piuparts.py b/piuparts.py
index 2911328..cf678aa 100644
--- a/piuparts.py
+++ b/piuparts.py
@@ -87,6 +87,7 @@ class Defaults:
def get_keyring(self):
"""Return default keyring."""
+
class DebianDefaults(Defaults):
def get_components(self):
@@ -101,6 +102,7 @@ class DebianDefaults(Defaults):
def get_keyring(self):
return "/usr/share/keyrings/debian-archive-keyring.gpg"
+
class UbuntuDefaults(Defaults):
def get_components(self):
@@ -115,6 +117,7 @@ class UbuntuDefaults(Defaults):
def get_keyring(self):
return "/usr/share/keyrings/ubuntu-archive-keyring.gpg"
+
class DefaultsFactory:
"""Instantiate the right defaults class."""
@@ -314,7 +317,7 @@ class Settings:
"/var/lib/rbldns/",
"/var/spool/powerdns/", # pdns-server (#531134), pdns-recursor (#531135)
# work around broken symlinks
- "/usr/lib/python2.6/dist-packages/python-support.pth", #635493 and #385775
+ "/usr/lib/python2.6/dist-packages/python-support.pth", # 635493 and #385775
"/usr/lib/python2.7/dist-packages/python-support.pth",
"/etc/modules-load.d/modules.conf",
# work around #316521 dpkg: incomplete cleanup of empty directories
@@ -339,7 +342,7 @@ class Settings:
"/usr/share/python3/",
"/usr/share/python3/debpython/",
# HACKS
- ]
+ ]
self.ignored_patterns = [
# system state
"/dev/.*",
@@ -362,7 +365,7 @@ class Settings:
"/var/lib/mercurial-server/.*",
"/var/lib/onak/.*",
"/var/lib/openvswitch/(pki/.*)?",
- "/var/lib/vmm/(./.*)?", #682184
+ "/var/lib/vmm/(./.*)?", # 682184
"/var/log/exim/.*",
"/var/log/exim4/.*",
"/var/spool/exim/.*",
@@ -372,10 +375,10 @@ class Settings:
"/var/www/.*",
# HACKS
"/lib/modules/.*/modules.*",
- ]
+ ]
self.non_pedantic_ignore_patterns = [
"/tmp/.*"
- ]
+ ]
settings = Settings()
@@ -413,6 +416,7 @@ class TimeOffsetFormatter(logging.Formatter):
DUMP = logging.DEBUG - 1
HANDLERS = []
+
def setup_logging(log_level, log_file_name):
logging.addLevelName(DUMP, "DUMP")
@@ -465,6 +469,7 @@ def unqualify(packages):
class Alarm(Exception):
pass
+
def alarm_handler(signum, frame):
raise Alarm
@@ -528,10 +533,10 @@ def run(command, ignore_errors=False, timeout=0):
logging.debug("Command ok: %s" % repr(command))
elif ignore_errors:
logging.debug("Command failed (status=%d), but ignoring error: %s" %
- (p.returncode, repr(command)))
+ (p.returncode, repr(command)))
else:
logging.error("Command failed (status=%d): %s\n%s" %
- (p.returncode, repr(command), indent_string(output)))
+ (p.returncode, repr(command), indent_string(output)))
panic()
return p.returncode, output
@@ -577,7 +582,7 @@ def make_metapackage(name, depends, conflicts):
panic_handler_id = do_on_panic(lambda: shutil.rmtree(tmpdir))
create_file(os.path.join(tmpdir, ".piuparts.tmpdir"), "metapackage creation")
old_umask = os.umask(0)
- os.makedirs(os.path.join(tmpdir, name, 'DEBIAN'), mode = 0755)
+ os.makedirs(os.path.join(tmpdir, name, 'DEBIAN'), mode=0755)
os.umask(old_umask)
control = deb822.Deb822()
control['Package'] = name
@@ -607,7 +612,7 @@ def split_path(pathname):
parts = []
while pathname:
(head, tail) = os.path.split(pathname)
- #print "split '%s' => '%s' + '%s'" % (pathname, head, tail)
+ # print "split '%s' => '%s' + '%s'" % (pathname, head, tail)
if tail:
parts.append(tail)
elif not head:
@@ -618,6 +623,7 @@ def split_path(pathname):
pathname = head
return parts
+
def canonicalize_path(root, pathname, report_links=False):
"""Canonicalize a path name, simulating chroot at 'root'.
@@ -631,18 +637,18 @@ def canonicalize_path(root, pathname, report_links=False):
one for each symlink encountered.
"""
- #print "\nCANONICALIZE %s %s" % (root, pathname)
+ # print "\nCANONICALIZE %s %s" % (root, pathname)
links = []
seen = []
parts = split_path(pathname)
- #print "PARTS ", list(reversed(parts))
+ # print "PARTS ", list(reversed(parts))
path = "/"
while parts:
tag = "\n".join(parts + [path])
- #print "TEST '%s' + " % path, list(reversed(parts))
+ # print "TEST '%s' + " % path, list(reversed(parts))
if tag in seen or len(seen) > 1024:
fullpath = os.path.join(path, *reversed(parts))
- #print "LOOP %s" % fullpath
+ # print "LOOP %s" % fullpath
path = fullpath
logging.error("ELOOP: Too many symbolic links in '%s'" % path)
break
@@ -658,14 +664,14 @@ def canonicalize_path(root, pathname, report_links=False):
path = "/"
elif os.path.islink(rootedpath):
target = os.readlink(rootedpath)
- #print "LINK to '%s'" % target
+ # print "LINK to '%s'" % target
links.append((newpath, target))
if os.path.isabs(target):
path = "/"
parts.extend(split_path(target))
else:
path = newpath
- #print "FINAL '%s'" % path
+ # print "FINAL '%s'" % path
if report_links:
return links
return path
@@ -681,7 +687,7 @@ def is_broken_symlink(root, dirpath, filename):
# The symlink chain, if any, has now been resolved. Does the target
# exist?
- #print "EXISTS ", pathname, os.path.exists(pathname)
+ # print "EXISTS ", pathname, os.path.exists(pathname)
return not os.path.exists(pathname)
@@ -700,7 +706,7 @@ class Chroot:
os.chmod(self.name, 0755)
logging.debug("Created temporary directory %s" % self.name)
- def create(self, temp_tgz = None):
+ def create(self, temp_tgz=None):
"""Create a chroot according to user's wishes."""
self.panic_handler_id = do_on_panic(self.remove)
if not settings.schroot:
@@ -821,21 +827,21 @@ class Chroot:
def setup_from_schroot(self, schroot):
self.schroot_session = schroot.split(":", 1)[-1] + "-" + str(uuid.uuid1()) + "-piuparts"
- run(['schroot', '--begin-session', '--chroot', schroot , '--session-name', self.schroot_session])
+ run(['schroot', '--begin-session', '--chroot', schroot, '--session-name', self.schroot_session])
ret_code, output = run(['schroot', '--chroot', "session:" + self.schroot_session, '--location'])
self.name = output.strip()
- logging.info("New schroot session in '%s'" % self.name);
+ logging.info("New schroot session in '%s'" % self.name)
def setup_from_lvm(self, lvm_volume):
"""Create a chroot by creating an LVM snapshot."""
self.lvm_base = os.path.dirname(lvm_volume)
self.lvm_vol_name = os.path.basename(lvm_volume)
- self.lvm_snapshot_name = self.lvm_vol_name + "-" + str(uuid.uuid1());
+ self.lvm_snapshot_name = self.lvm_vol_name + "-" + str(uuid.uuid1())
self.lvm_snapshot = os.path.join(self.lvm_base, self.lvm_snapshot_name)
logging.debug("Creating LVM snapshot %s from %s" % (self.lvm_snapshot, lvm_volume))
run(['lvcreate', '-n', self.lvm_snapshot, '-s', lvm_volume, '-L', settings.lvm_snapshot_size])
- logging.info("Mounting LVM snapshot to %s" % self.name);
+ logging.info("Mounting LVM snapshot to %s" % self.name)
run(['mount', self.lvm_snapshot, self.name])
def setup_from_dir(self, dirname):
@@ -859,11 +865,13 @@ class Chroot:
'usr/bin/eatmydata')):
prefix.append('eatmydata')
if settings.schroot:
- return run(["schroot", "--preserve-environment", "--run-session", "--chroot", "session:" + self.schroot_session, "--directory", "/", "-u", "root", "--"] + prefix + command,
+ return run(
+ ["schroot", "--preserve-environment", "--run-session", "--chroot", "session:" +
+ self.schroot_session, "--directory", "/", "-u", "root", "--"] + prefix + command,
ignore_errors=ignore_errors, timeout=settings.max_command_runtime)
else:
return run(["chroot", self.name] + prefix + command,
- ignore_errors=ignore_errors, timeout=settings.max_command_runtime)
+ ignore_errors=ignore_errors, timeout=settings.max_command_runtime)
def mkdir_p(self, path):
fullpath = self.relative(path)
@@ -914,10 +922,10 @@ class Chroot:
elif "http_proxy" in os.environ:
proxy = os.environ["http_proxy"]
else:
- proxy = None;
- pat = re.compile(r"^Acquire::http::Proxy\s+\"([^\"]+)\"", re.I);
+ proxy = None
+ pat = re.compile(r"^Acquire::http::Proxy\s+\"([^\"]+)\"", re.I)
p = subprocess.Popen(["apt-config", "dump"],
- stdout=subprocess.PIPE)
+ stdout=subprocess.PIPE)
stdout, _ = p.communicate()
if stdout:
for line in stdout.split("\n"):
@@ -932,7 +940,7 @@ class Chroot:
lines.append('Dpkg::Options {"--force-confdef";};\n')
create_file(self.relative("etc/apt/apt.conf.d/piuparts"),
- "".join(lines))
+ "".join(lines))
def create_dpkg_conf(self):
"""Create /etc/dpkg/dpkg.cfg.d/piuparts inside the chroot."""
@@ -946,7 +954,7 @@ class Chroot:
if not os.path.exists(self.relative("etc/dpkg/dpkg.cfg.d")):
os.mkdir(self.relative("etc/dpkg/dpkg.cfg.d"))
create_file(self.relative("etc/dpkg/dpkg.cfg.d/piuparts"),
- "".join(lines))
+ "".join(lines))
def create_policy_rc_d(self):
"""Create a policy-rc.d that prevents daemons from running."""
@@ -976,7 +984,7 @@ class Chroot:
def setup_minimal_chroot(self):
"""Set up a minimal Debian system in a chroot."""
logging.debug("Setting up minimal chroot for %s at %s." %
- (settings.debian_distros[0], self.name))
+ (settings.debian_distros[0], self.name))
prefix = []
if settings.eatmydata and os.path.isfile('/usr/bin/eatmydata'):
prefix.append('eatmydata')
@@ -1062,10 +1070,10 @@ class Chroot:
logging.info("apt-cache does not know about any of the requested packages")
else:
logging.info("apt-cache knows about the following packages: " +
- ", ".join(known_packages))
+ ", ".join(known_packages))
if new_packages:
logging.info("the following packages are not in the archive: " +
- ", ".join(new_packages))
+ ", ".join(new_packages))
return known_packages
def copy_files(self, source_names, target_name):
@@ -1079,10 +1087,10 @@ class Chroot:
shutil.copy(source_name, target_name)
except IOError, detail:
logging.error("Error copying %s to %s: %s" %
- (source_name, target_name, detail))
+ (source_name, target_name, detail))
panic()
- def list_installed_files (self, pre_info, post_info):
+ def list_installed_files(self, pre_info, post_info):
"""List the new files installed, removed and modified between two dir trees.
Actually, it is a nice output of the funcion diff_meta_dat."""
(new, removed, modified) = diff_meta_data(pre_info, post_info)
@@ -1143,10 +1151,10 @@ class Chroot:
pre_info = self.save_meta_data()
self.run(["dpkg", "-i"] + tmp_files, ignore_errors=True)
- self.list_installed_files (pre_info, self.save_meta_data())
+ self.list_installed_files(pre_info, self.save_meta_data())
self.run(apt_get_install)
- self.list_installed_files (pre_info, self.save_meta_data())
+ self.list_installed_files(pre_info, self.save_meta_data())
else:
self.run(["dpkg", "-i"] + tmp_files, ignore_errors=True)
@@ -1156,7 +1164,7 @@ class Chroot:
logging.error("Could not install %s.", " ".join(unqualify(packages)))
panic()
- logging.info ("Installation of %s ok", tmp_files)
+ logging.info("Installation of %s ok", tmp_files)
if with_scripts:
self.run_scripts("post_install")
@@ -1179,14 +1187,13 @@ class Chroot:
if settings.list_installed_files:
pre_info = self.save_meta_data()
self.run(apt_get_install)
- self.list_installed_files (pre_info, self.save_meta_data())
+ self.list_installed_files(pre_info, self.save_meta_data())
else:
self.run(apt_get_install)
if with_scripts:
self.run_scripts("post_install")
-
def get_selections(self):
"""Get current package selections in a chroot."""
(status, output) = self.run(["dpkg", "--get-selections", "*"])
@@ -1203,7 +1210,7 @@ class Chroot:
(status, output) = self.run(["dpkg-divert", "--list"])
return output.split("\n")
- def get_modified_diversions(self, pre_install_diversions, post_install_diversions = None):
+ def get_modified_diversions(self, pre_install_diversions, post_install_diversions=None):
"""Check that diversions in chroot are identical (though potentially reordered)."""
if post_install_diversions is None:
post_install_diversions = self.get_diversions()
@@ -1226,7 +1233,7 @@ class Chroot:
(status, output) = run(["dpkg-query", "-f", "${Version}\n", "-W", "adequate"], ignore_errors=True)
logging.info("Running adequate version %s now." % output.strip())
adequate_tags = [
- 'bin-or-sbin-binary-requires-usr-lib-library',
+ 'bin-or-sbin-binary-requires-usr-lib-library',
'broken-binfmt-detector',
'broken-binfmt-interpreter',
'incompatible-licenses',
@@ -1240,32 +1247,32 @@ class Chroot:
'pyshared-file-not-bytecompiled',
'symbol-size-mismatch',
'undefined-symbol',
- ]
+ ]
boring_tags = [
- 'obsolete-conffile',
+ 'obsolete-conffile',
'broken-symlink',
- ]
- ignored_tags = [ ]
+ ]
+ ignored_tags = []
(status, output) = run(["adequate", "--root", self.name] + packages, ignore_errors=True)
for tag in ignored_tags:
# ignore some tags
- _regex = '^[^:]+: '+tag+' .*\n'
+ _regex = '^[^:]+: ' + tag + ' .*\n'
output = re.compile(_regex, re.MULTILINE).sub('', output)
if output:
inadequate_results = ''
boring_results = ''
for tag in adequate_tags:
- if ' '+tag+' ' in output:
- inadequate_results += ' '+tag+' '
+ if ' ' + tag + ' ' in output:
+ inadequate_results += ' ' + tag + ' '
for tag in boring_tags:
- if ' '+tag+' ' in output:
- boring_results += ' '+tag+' '
+ if ' ' + tag + ' ' in output:
+ boring_results += ' ' + tag + ' '
if settings.warn_if_inadequate:
error_code = 'WARN'
else:
error_code = 'FAIL'
logging.error("%s: Inadequate results from running adequate!\n%s" %
- (error_code, indent_string(output.replace(self.name, ""))))
+ (error_code, indent_string(output.replace(self.name, ""))))
if inadequate_results:
logging.error("%s: Running adequate resulted in inadequate tags found: %s" % (error_code, inadequate_results))
if boring_results:
@@ -1273,9 +1280,9 @@ class Chroot:
if not boring_results and not inadequate_results:
logging.error("%s: Found unknown tags running adequate." % error_code)
if status != 0:
- logging.error("%s: Exit code from adequate was %s!" % (error_code,status))
+ logging.error("%s: Exit code from adequate was %s!" % (error_code, status))
if not settings.warn_if_inadequate:
- panic()
+ panic()
def list_paths_with_symlinks(self):
file_owners = self.get_files_owned_by_packages()
@@ -1292,16 +1299,16 @@ class Chroot:
ofc = ", ".join(file_owners[fc])
else:
ofc = "?"
- bad.append("%s (%s) != %s (%s)" %(f, of, fc, ofc))
+ bad.append("%s (%s) != %s (%s)" % (f, of, fc, ofc))
for (link, target) in canonicalize_path(self.name, dn, report_links=True):
bad.append(" %s -> %s" % (link, target))
if bad:
if overwrites:
logging.error("FAIL: silently overwrites files via directory symlinks:\n" +
- indent_string("\n".join(bad)))
+ indent_string("\n".join(bad)))
else:
logging.info("dirname part contains a symlink:\n" +
- indent_string("\n".join(bad)))
+ indent_string("\n".join(bad)))
def remove_packages(self, packages):
"""Remove packages in a chroot. May reinstall packages at the same time if they are suffixed with '+'."""
@@ -1337,7 +1344,7 @@ class Chroot:
nondeps_to_purge = [name for name, state in nondeps.iteritems()
if state == "purge"]
deps_to_install = [name for name, state in deps.iteritems()
- if state == "install"]
+ if state == "install"]
self.list_paths_with_symlinks()
self.check_debsums()
@@ -1382,7 +1389,6 @@ class Chroot:
self.run(["dpkg", "--purge", "--pending"])
self.run(["dpkg", "--remove", "--pending"])
-
def save_meta_data(self):
"""Return the filesystem meta data for all objects in the chroot."""
self.run(["apt-get", "clean"])
@@ -1430,7 +1436,6 @@ class Chroot:
f.close()
return vdict
-
def check_for_no_processes(self, fail=None):
"""Check there are no processes running inside the chroot."""
(status, output) = run(["lsof", "-w", "+D", self.name], ignore_errors=True)
@@ -1444,7 +1449,6 @@ class Chroot:
self.terminate_running_processes()
panic()
-
def terminate_running_processes(self):
"""Terminate all processes running in the chroot."""
seen = []
@@ -1471,7 +1475,6 @@ class Chroot:
time.sleep(5)
-
def mount_selinux(self):
if selinux_enabled():
run(["mkdir", "-p", self.selinuxfs_relative_path()])
@@ -1552,7 +1555,7 @@ class Chroot:
vdir = self.relative("var/lib/dpkg/info")
vlist = []
- has_cronfiles = False
+ has_cronfiles = False
for p in packages:
basename = p + ".list"
@@ -1576,7 +1579,7 @@ class Chroot:
return has_cronfiles, vlist
- def check_output_cronfiles (self, list):
+ def check_output_cronfiles(self, list):
"""Check if a given list of cronfiles has any output. Executes
cron file as cron would do (except for SHELL)"""
failed = False
@@ -1599,7 +1602,7 @@ class Chroot:
vdir = self.relative("var/lib/dpkg/info")
vlist = []
- has_logrotatefiles = False
+ has_logrotatefiles = False
for p in packages:
basename = p + ".list"
@@ -1627,7 +1630,7 @@ class Chroot:
diff = diff_selections(self, old_selections)
return diff.keys()
- def check_output_logrotatefiles (self, list):
+ def check_output_logrotatefiles(self, list):
"""Check if a given list of logrotatefiles has any output. Executes
logrotate file as logrotate would do from cron (except for SHELL)"""
failed = False
@@ -1644,12 +1647,12 @@ class Chroot:
if failed:
panic()
- def run_scripts (self, step):
+ def run_scripts(self, step):
""" Run custom scripts to given step post-install|remove|purge"""
if not settings.scriptsdirs:
return
- logging.info("Running scripts "+ step)
+ logging.info("Running scripts " + step)
basepath = self.relative("tmp/scripts/")
if not os.path.exists(basepath):
logging.error("Scripts directory %s does not exist" % basepath)
@@ -1675,14 +1678,16 @@ class VirtServ(Chroot):
def _awaitok(self, cmd):
r = self._vs.stdout.readline().rstrip('\n')
l = r.split(' ')
- if l[0] != 'ok': self._fail('virtserver response to %s: %s' % (cmd, r))
+ if l[0] != 'ok':
+ self._fail('virtserver response to %s: %s' % (cmd, r))
logging.debug('adt-virt << %s', r)
return l[1:]
def _vs_send(self, cmd):
if type(cmd) == type([]):
def maybe_quote(a):
- if type(a) != type(()): return a
+ if type(a) != type(()):
+ return a
(a,) = a
return urllib.quote(a)
cmd = ' '.join(map(maybe_quote, cmd))
@@ -1712,12 +1717,13 @@ class VirtServ(Chroot):
if self._vs is None:
logging.debug('adt-virt || %s' % self._cmdline)
self._vs = subprocess.Popen(self._cmdline, shell=True,
- stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=None)
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=None)
self._awaitok('banner')
self._caps = self._command('capabilities')
def shutdown(self):
- if self._vs is None: return
+ if self._vs is None:
+ return
self._vs_send('quit')
self._vs.stdin.close()
self._vs.stdout.close()
@@ -1729,7 +1735,7 @@ class VirtServ(Chroot):
dont_do_on_panic(self.panic_handler_id)
def _fail(self, m):
- logging.error("adt-virt-* error: "+m)
+ logging.error("adt-virt-* error: " + m)
panic()
def _open(self):
@@ -1741,14 +1747,21 @@ class VirtServ(Chroot):
# adt-virt revert
def create_temp_tgz_file(self):
return self
+
def remove_temp_tgz_file(self, tgz):
- if tgz is not self: self._fail('removing a tgz not supported')
+ if tgz is not self:
+ self._fail('removing a tgz not supported')
# FIXME: anything else to do here?
+
def pack_into_tgz(self, tgz):
- if tgz is not self: self._fail('packing into tgz not supported')
- if not 'revert' in self._caps: self._fail('testbed cannot revert')
+ if tgz is not self:
+ self._fail('packing into tgz not supported')
+ if not 'revert' in self._caps:
+ self._fail('testbed cannot revert')
+
def unpack_from_tgz(self, tgz):
- if tgz is not self: self._fail('unpacking from tgz not supported')
+ if tgz is not self:
+ self._fail('unpacking from tgz not supported')
self._open()
def _execute(self, cmdl, tolerate_errors=False):
@@ -1770,7 +1783,7 @@ class VirtServ(Chroot):
if es and not tolerate_errors:
stderr_data = self._getfilecontents(stderr)
logging.error("Execution failed (status=%d): %s\n%s" %
- (es, `cmdl`, indent_string(stderr_data)))
+ (es, `cmdl`, indent_string(stderr_data)))
panic()
return (es, stdout, stderr)
@@ -1779,7 +1792,7 @@ class VirtServ(Chroot):
stderr_data = self._getfilecontents(stderr)
if es or stderr_data:
logging.error('Internal command failed (status=%d): %s\n%s' %
- (es, `cmdl`, indent_string(stderr_data)))
+ (es, `cmdl`, indent_string(stderr_data)))
panic()
(_, tf) = create_temp_file()
try:
@@ -1794,10 +1807,11 @@ class VirtServ(Chroot):
(es, stdout, stderr) = self._execute(cmdl, tolerate_errors=True)
stdout_data = self._getfilecontents(stdout)
print >>sys.stderr, "VirtServ run", `command`, `cmdl`, '==>', `es`, `stdout`, `stderr`, '|', stdout_data
- if es == 0 or ignore_errors: return (es, stdout_data)
+ if es == 0 or ignore_errors:
+ return (es, stdout_data)
stderr_data = self._getfilecontents(stderr)
logging.error('Command failed (status=%d): %s\n%s' %
- (es, `command`, indent_string(stdout_data + stderr_data)))
+ (es, `command`, indent_string(stdout_data + stderr_data)))
panic()
def setup_minimal_chroot(self):
@@ -1806,18 +1820,21 @@ class VirtServ(Chroot):
def _tbpath(self, with_junk):
if not with_junk.startswith(self.name):
logging.error("Un-mangling testbed path `%s' but it does not"
- "start with expected manglement `%s'" %
- (with_junk, self.name))
+ "start with expected manglement `%s'" %
+ (with_junk, self.name))
panic()
return with_junk[len(self.name):]
def chmod(self, path, mode):
self._execute(['chmod', ('0%o' % mode), self._tbpath(path)])
+
def remove_files(self, paths):
self._execute(['rm', '--'] + map(self._tbpath, paths))
+
def copy_file(self, our_src, tb_dest):
self._command(['copydown', (our_src,),
- (self._tbpath(tb_dest)+'/'+os.path.basename(our_src),)])
+ (self._tbpath(tb_dest) + '/' + os.path.basename(our_src),)])
+
def create_file(self, path, data):
path = self._tbpath(path)
try:
@@ -1829,7 +1846,8 @@ class VirtServ(Chroot):
finally:
os.remove(tf)
- class DummyStat: pass
+ class DummyStat:
+ pass
def save_meta_data(self):
mode_map = {
@@ -1845,7 +1863,7 @@ class VirtServ(Chroot):
vdict = {}
tf = self._execute_getoutput(['find', '/', '-xdev', '-printf',
- "%y %m %U %G %s %p %l \\n".replace(' ', '\\0')])
+ "%y %m %U %G %s %p %l \\n".replace(' ', '\\0')])
try:
f = file(tf)
@@ -1853,17 +1871,20 @@ class VirtServ(Chroot):
line = ''
while 1:
splut = line.split('\0')
- if len(splut) == 8 and splut[7] == '\n': break
+ if len(splut) == 8 and splut[7] == '\n':
+ break
if len(splut) >= 8:
self._fail('aaargh wrong output from find: %s' %
- urllib.quote(line), `splut`)
+ urllib.quote(line), `splut`)
l = f.readline()
if not l:
- if not line: break
+ if not line:
+ break
self._fail('aargh missing final newline from find'
- ': %s, %s' % (`l`[0:200], `splut`[0:200]))
+ ': %s, %s' % (`l`[0:200], `splut`[0:200]))
line += l
- if not line: break
+ if not line:
+ break
st = VirtServ.DummyStat()
st.st_mode = mode_map[splut[0]] | int(splut[1], 8)
@@ -1916,14 +1937,21 @@ class VirtServ(Chroot):
for l in f:
logging.error("FAIL: Broken symlink: " + l)
broken = True
- if broken: panic()
+ if broken:
+ panic()
logging.debug("No broken symlinks found.")
finally:
os.remove(tf)
- def check_for_no_processes(self): pass # ?!
- def mount_proc(self): pass
- def unmount_proc(self): pass
+ def check_for_no_processes(self):
+ pass # ?!
+
+ def mount_proc(self):
+ pass
+
+ def unmount_proc(self):
+ pass
+
def selinux_enabled(enabled_test="/usr/sbin/selinuxenabled"):
if os.access(enabled_test, os.X_OK):
@@ -1933,6 +1961,7 @@ def selinux_enabled(enabled_test="/usr/sbin/selinuxenabled"):
else:
return False
+
def objects_are_different(pair1, pair2):
"""Are filesystem objects different based on their meta data?"""
(m1, target1) = pair1
@@ -1940,12 +1969,13 @@ def objects_are_different(pair1, pair2):
if (m1.st_mode != m2.st_mode or
m1.st_uid != m2.st_uid or
m1.st_gid != m2.st_gid or
- target1 != target2):
+ target1 != target2):
return True
if stat.S_ISREG(m1.st_mode):
- return m1.st_size != m2.st_size # or m1.st_mtime != m2.st_mtime
+ return m1.st_size != m2.st_size # or m1.st_mtime != m2.st_mtime
return False
+
def format_object_attributes(pair):
(st, target) = pair
ft = ""
@@ -1964,7 +1994,7 @@ def format_object_attributes(pair):
if stat.S_ISSOCK(st.st_mode):
ft += "s"
res = "(%d, %d, %s %o, %d, %s)" % (
- st.st_uid,
+ st.st_uid,
st.st_gid,
ft,
st.st_mode,
@@ -2001,8 +2031,8 @@ def diff_meta_data(tree1, tree2):
for name in tree1.keys()[:]:
if name in tree2:
if objects_are_different(tree1[name], tree2[name]):
- logging.debug("Modified(uid, gid, mode, size, target): %s %s != %s" % \
- (name, format_object_attributes(tree1[name]), format_object_attributes(tree2[name])))
+ logging.debug("Modified(uid, gid, mode, size, target): %s %s != %s" %
+ (name, format_object_attributes(tree1[name]), format_object_attributes(tree2[name])))
modified.append((name, tree1[name]))
del tree1[name]
del tree2[name]
@@ -2085,7 +2115,7 @@ def diff_selections(chroot, selections):
if name not in selections:
changes[name] = "purge"
elif selections[name] != current[name] and \
- selections[name] in ["purge", "install"]:
+ selections[name] in ["purge", "install"]:
changes[name] = selections[name]
for name, value in selections.iteritems():
if name not in current:
@@ -2114,6 +2144,8 @@ def get_package_names_from_package_files(package_files):
# Method to process a changes file, returning a list of all the .deb packages
# from the 'Files' stanza.
+
+
def process_changes(changes):
# Determine the path to the changes file, then check if it's readable.
dir_path = ""
@@ -2129,8 +2161,8 @@ def process_changes(changes):
# Determine the packages in the changes file through the 'Files' stanza.
field = 'Files'
- pattern = re.compile(\
- r'^'+field+r':' + r''' # The field we want the contents from
+ pattern = re.compile(
+ r'^' + field + r':' + r''' # The field we want the contents from
(.*?) # The contents of the field
\n([^ ]|$) # Start of a new field or EOF
''',
@@ -2196,7 +2228,7 @@ def check_results(chroot, chroot_state, file_owners, deps_info=None):
file_list(new, file_owners))
else:
logging.error("FAIL: Package purging left files on system:\n" +
- file_list(new, file_owners))
+ file_list(new, file_owners))
ok = False
if removed:
logging.error("FAIL: After purging files have disappeared:\n" +
@@ -2210,7 +2242,7 @@ def check_results(chroot, chroot_state, file_owners, deps_info=None):
if settings.warn_on_others and deps_info is not None:
if warnnew:
msg = ("Warning: Package purging left files on system:\n" +
- file_list(warnnew, file_owners) + \
+ file_list(warnnew, file_owners) +
"These files seem to have been left by dependencies rather "
"than by packages\nbeing explicitly tested.\n")
logging.info(msg)
@@ -2478,7 +2510,8 @@ def install_and_upgrade_between_distros(package_files, packages_qualified):
chroot.remove()
# leave indication in logfile why we do what we do
- logging.info("Notice: package selections and meta data from target distro saved, now starting over from source distro. See the description of --save-end-meta and --end-meta to learn why this is neccessary and how to possibly avoid it.")
+ logging.info(
+ "Notice: package selections and meta data from target distro saved, now starting over from source distro. See the description of --save-end-meta and --end-meta to learn why this is neccessary and how to possibly avoid it.")
chroot = get_chroot()
if temp_tgz is None:
@@ -2557,7 +2590,7 @@ def find_default_debian_mirrors():
parts = line.split()
if len(parts) > 2 and parts[0] == "deb":
mirrors.append((parts[1], parts[3:]))
- break # Only use the first one, at least for now.
+ break # Only use the first one, at least for now.
f.close()
except IOError:
return None
@@ -2575,13 +2608,13 @@ def forget_ignores(option, opt, value, parser, *args, **kwargs):
def set_basetgz_to_pbuilder(option, opt, value, parser, *args, **kwargs):
parser.values.basetgz = "/var/cache/pbuilder/base.tgz"
+
def parse_command_line():
"""Parse the command line, change global settings, return non-options."""
parser = optparse.OptionParser(usage="%prog [options] package ...",
version="piuparts %s" % VERSION)
-
parser.add_option("-a", "--apt", action="store_true", default=False,
help="Command line arguments are package names " +
"to be installed via apt.")
@@ -2867,9 +2900,9 @@ def parse_command_line():
settings.keyring = defaults.get_keyring()
settings.do_not_verify_signatures = opts.do_not_verify_signatures
if settings.do_not_verify_signatures:
- settings.apt_unauthenticated="Yes"
+ settings.apt_unauthenticated = "Yes"
else:
- settings.apt_unauthenticated="No"
+ settings.apt_unauthenticated = "No"
settings.install_recommends = opts.install_recommends
settings.eatmydata = not opts.no_eatmydata
settings.dpkg_force_unsafe_io = not opts.dpkg_noforce_unsafe_io
@@ -2962,7 +2995,7 @@ def parse_command_line():
settings.debian_mirrors = defaults.get_mirror()
settings.distro_config = piupartslib.conf.DistroConfig(
- DISTRO_CONFIG_FILE, settings.debian_mirrors[0][0])
+ DISTRO_CONFIG_FILE, settings.debian_mirrors[0][0])
if settings.keep_sources_list and \
(not settings.basetgz or len(settings.debian_distros) > 1):
@@ -2983,10 +3016,13 @@ def parse_command_line():
def get_chroot():
- if settings.adt_virt is None: return Chroot()
+ if settings.adt_virt is None:
+ return Chroot()
return settings.adt_virt
# Process the packages given in a list
+
+
def process_packages(package_list):
# Find the names of packages.
if settings.args_are_package_files:
@@ -3008,7 +3044,7 @@ def process_packages(package_list):
if not settings.no_install_purge_test:
extra_packages = chroot.get_known_packages(settings.extra_old_packages)
if not install_purge_test(chroot, chroot_state,
- package_files, packages, extra_packages):
+ package_files, packages, extra_packages):
logging.error("FAIL: Installation and purging test.")
panic()
logging.info("PASS: Installation and purging test.")
@@ -3023,7 +3059,7 @@ def process_packages(package_list):
if not known_packages:
logging.info("Can't test upgrade: packages not known by apt-get.")
elif install_upgrade_test(chroot, chroot_state, package_files,
- packages, known_packages):
+ packages, known_packages):
logging.info("PASS: Installation, upgrade and purging tests.")
else:
logging.error("FAIL: Installation, upgrade and purging tests.")
@@ -3037,7 +3073,9 @@ def process_packages(package_list):
logging.error("FAIL: Upgrading between Debian distributions.")
panic()
- if settings.adt_virt is not None: settings.adt_virt.shutdown()
+ if settings.adt_virt is not None:
+ settings.adt_virt.shutdown()
+
def main():
"""Main program. But you knew that."""
@@ -3065,7 +3103,6 @@ def main():
if "DISPLAY" in os.environ:
del os.environ["DISPLAY"]
-
changes_packages_list = []
regular_packages_list = []
changes_p = re.compile('.*\.changes$')
diff --git a/piupartslib/__init__.py b/piupartslib/__init__.py
index b712eac..a496bf4 100644
--- a/piupartslib/__init__.py
+++ b/piupartslib/__init__.py
@@ -29,6 +29,7 @@ import packagesdb
class DecompressedStream():
+
def __init__(self, fileobj, decompressor=None):
self._input = fileobj
self._decompressor = decompressor
diff --git a/piupartslib/conf.py b/piupartslib/conf.py
index def54f1..161ac51 100644
--- a/piupartslib/conf.py
+++ b/piupartslib/conf.py
@@ -103,14 +103,14 @@ class Config(UserDict.UserDict):
debdist = distro_info.DebianDistroInfo()
# start with e.g. "sid" -> "unstable"
- distmap = collections.defaultdict( lambda : "unknown", [
- (debdist.old(), "oldstable"),
+ distmap = collections.defaultdict(lambda: "unknown", [
+ (debdist.old(), "oldstable"),
(debdist.devel(), "unstable"),
(debdist.stable(), "stable"),
(debdist.testing(), "testing"),
("experimental", "experimental"),
("rc", "experimental"),
- ])
+ ])
# add mappings for e.g. "oldstable" -> "oldstable"
distmap.update(dict([(val, val) for key, val in distmap.iteritems()]))
@@ -119,7 +119,7 @@ class Config(UserDict.UserDict):
# currently returns 'Debian 6.0 "Squeeze"'
dkey = lambda x: "Debian" + re.split('[ \.]', x(result="fullname"))[1]
dfuncs = [debdist.old, debdist.stable, debdist.testing]
- distmap.update(dict([(dkey(x),distmap[x()]) for x in dfuncs]))
+ distmap.update(dict([(dkey(x), distmap[x()]) for x in dfuncs]))
return distmap
@@ -132,7 +132,7 @@ class Config(UserDict.UserDict):
if not distrolist:
distrolist = [self.get_distro()] + self.get_distros()
mappedlist = [self._map_distro(x) for x in distrolist]
- return reduce(lambda x,y: y if y != "unknown" else x, mappedlist)
+ return reduce(lambda x, y: y if y != "unknown" else x, mappedlist)
def get_area(self):
if self["area"] is not None:
@@ -154,13 +154,13 @@ class DistroConfig(UserDict.UserDict):
UserDict.UserDict.__init__(self)
self._mirror = mirror
self._defaults = {
- "uri": None,
+ "uri": None,
"distribution": None,
"components": None,
"target-release": None,
"depends": None,
"candidates": None,
- }
+ }
cp = ConfigParser.SafeConfigParser()
cp.read(filename)
for section in cp.sections():
@@ -195,7 +195,7 @@ class DistroConfig(UserDict.UserDict):
def _get_packages_url(self, distro, area, arch):
return "%s/dists/%s/%s/binary-%s/Packages" % (
- self.get_mirror(distro),
+ self.get_mirror(distro),
self.get_distribution(distro),
area, arch)
@@ -205,7 +205,7 @@ class DistroConfig(UserDict.UserDict):
def _get_sources_url(self, distro, area):
return "%s/dists/%s/%s/source/Sources" % (
- self.get_mirror(distro),
+ self.get_mirror(distro),
self.get_distribution(distro),
area)
@@ -228,7 +228,7 @@ class DistroConfig(UserDict.UserDict):
todo = todo[1:]
if not curr in seen:
seen.append(curr)
- todo = (self.get(curr, "depends") or "").split() + [ curr ] + todo
+ todo = (self.get(curr, "depends") or "").split() + [curr] + todo
elif not curr in done:
if include_virtual or not self._is_virtual(curr):
done.append(curr)
diff --git a/piupartslib/dwke.py b/piupartslib/dwke.py
index cba3e30..0e2624b 100644
--- a/piupartslib/dwke.py
+++ b/piupartslib/dwke.py
@@ -32,6 +32,7 @@ LOG_EXT = '.log'
class Problem():
+
""" Encapsulate a particular known problem """
def __init__(self, probpath):
@@ -47,7 +48,6 @@ class Problem():
"HEADER", "HELPTEXT"]
self.optional_tags = ["EXCLUDE_PATTERN", "EXPLAIN", "PRIORITY"]
-
self.init_problem()
for tag in self.required_tags:
@@ -83,8 +83,8 @@ class Problem():
while value[-1] == '\n':
value = value[:-1]
- if re.search("^\'.+\'$", value, re.MULTILINE|re.DOTALL) \
- or re.search('^\".+\"$', value, re.MULTILINE|re.DOTALL):
+ if re.search("^\'.+\'$", value, re.MULTILINE | re.DOTALL) \
+ or re.search('^\".+\"$', value, re.MULTILINE | re.DOTALL):
value = value[1:-1]
if name in self.required_tags or name in self.optional_tags:
@@ -102,7 +102,7 @@ class Problem():
for line in logbody.splitlines():
if self.inc_re.search(line):
if self.exc_re == None \
- or not self.exc_re.search(line):
+ or not self.exc_re.search(line):
return True
return False
@@ -118,6 +118,7 @@ class Problem():
class FailureManager():
+
"""Class to track known failures encountered, by package,
where (e.g. 'fail'), and known problem type"""
@@ -168,27 +169,32 @@ class FailureManager():
self.failures.sort(key=keyfunc)
def filtered(self, problem):
- return [x for x in self.failures if problem==x.problem]
+ return [x for x in self.failures if problem == x.problem]
def make_failure(where, problem, pkgspec):
return (namedtuple('Failure', 'where problem pkgspec')(where, problem, pkgspec))
+
def get_where(logpath):
"""Convert a path to a log file to the 'where' component (e.g. 'pass')"""
return logpath.split('/')[-2]
+
def replace_ext(fpath, newext):
basename = os.path.splitext(os.path.split(fpath)[1])[0]
return '/'.join(fpath.split('/')[:-1] + [basename + newext])
+
def get_pkg(pkgspec):
return pkgspec.split('_')[0]
+
def get_kpr_path(logpath):
"""Return the kpr file path for a particular log path"""
return replace_ext(logpath, KPR_EXT)
+
def get_file_dict(workdirs, ext):
"""For files in [workdirs] with extension 'ext', create a dict of
<pkgname>_<version>: <path>"""
diff --git a/piupartslib/packagesdb.py b/piupartslib/packagesdb.py
index 0fbfde9..7731a2e 100644
--- a/piupartslib/packagesdb.py
+++ b/piupartslib/packagesdb.py
@@ -54,6 +54,7 @@ def rfc822_like_header_parse(input):
headers.append(line)
return headers
+
class Package(UserDict.UserDict):
def __init__(self, headers):
@@ -266,7 +267,7 @@ class PackagesDB:
"unknown",
"unknown-preferred-alternative", # obsolete
"no-dependency-from-alternatives-exists", # obsolete
- #"does-not-exist", # can only happen as query result for a dependency
+ # "does-not-exist", # can only happen as query result for a dependency
]
_good_states = [
@@ -476,7 +477,7 @@ class PackagesDB:
prefer_alt = alternative
prefer_alt_score = 2
elif prefer_alt_score < 1 and \
- altdep_state in ["waiting-to-be-tested", "waiting-for-dependency-to-be-tested"]:
+ altdep_state in ["waiting-to-be-tested", "waiting-for-dependency-to-be-tested"]:
prefer_alt = alternative
prefer_alt_score = 1
elif prefer_alt_score < 0 and altdep_state == "unknown":
@@ -488,7 +489,7 @@ class PackagesDB:
package.prefer_alt_depends(header, d, prefer_alt)
dep_states = [(dep, self.get_best_package_state(dep))
- for dep in package.dependencies()]
+ for dep in package.dependencies()]
for dep, dep_state in dep_states:
if dep_state in self._propagate_error_state:
@@ -711,9 +712,9 @@ class PackagesDB:
if not self._recycle_mode:
return (
- min(rdep_chain_len, waiting_count),
+ min(rdep_chain_len, waiting_count),
waiting_count,
- )
+ )
try:
statobj = self._logdb.stat(self._recycle, p["Package"], p["Version"])
@@ -724,25 +725,25 @@ class PackagesDB:
mtime = 0
return (
- min(rdep_chain_len, waiting_count),
+ min(rdep_chain_len, waiting_count),
waiting_count,
not self._logdb.log_exists(p, [self._ok]), # prefer problematic logs
-ctime / 3600, # prefer older, at 1 hour granularity to allow randomization
-mtime / 3600, # prefer older, at 1 hour granularity to allow randomization
- )
+ )
def _find_packages_ready_for_testing(self):
if self._candidates_for_testing is None:
self._candidates_for_testing = [self.get_package(pn)
- for pn in self.get_pkg_names_in_state("waiting-to-be-tested")]
+ for pn in self.get_pkg_names_in_state("waiting-to-be-tested")]
self._candidates_for_testing = [p for p in self._candidates_for_testing
- if not self._logdb.log_exists(p, [self._reserved]) or \
- self._logdb.log_exists(p, [self._recycle])]
+ if not self._logdb.log_exists(p, [self._reserved]) or
+ self._logdb.log_exists(p, [self._recycle])]
if len(self._candidates_for_testing) > 1:
tuples = [(self._get_package_weight(p), random.random(), p)
- for p in self._candidates_for_testing]
+ for p in self._candidates_for_testing]
self._candidates_for_testing = [x[-1]
- for x in sorted(tuples, reverse=True)]
+ for x in sorted(tuples, reverse=True)]
return self._candidates_for_testing[:]
def _remove_unavailable_candidate(self, p):
diff --git a/piupartslib/pkgsummary.py b/piupartslib/pkgsummary.py
index 8b91f15..688df9e 100644
--- a/piupartslib/pkgsummary.py
+++ b/piupartslib/pkgsummary.py
@@ -18,7 +18,6 @@
# with this program; if not, see <http://www.gnu.org/licenses/>.
-
# Piuparts summary generation module
#
# This module is used to create exportable section and global package testing
@@ -85,6 +84,7 @@ import json
import datetime
from collections import namedtuple, defaultdict
+
class SummaryException(Exception):
pass
@@ -96,28 +96,29 @@ DEFSEC = 'overall'
FlagInfo = namedtuple('FlagInfo', ['word', 'priority', 'states'])
flaginfo = {
- 'F': FlagInfo('Failed', 0, ["failed-testing"]),
+ 'F': FlagInfo('Failed', 0, ["failed-testing"]),
'X': FlagInfo('Blocked', 1, [
- "cannot-be-tested",
- "dependency-failed-testing",
- "dependency-cannot-be-tested",
- "dependency-does-not-exist",
- ]),
+ "cannot-be-tested",
+ "dependency-failed-testing",
+ "dependency-cannot-be-tested",
+ "dependency-does-not-exist",
+ ]),
'W': FlagInfo('Waiting', 2, [
- "waiting-to-be-tested",
- "waiting-for-dependency-to-be-tested",
- ]),
+ "waiting-to-be-tested",
+ "waiting-for-dependency-to-be-tested",
+ ]),
'P': FlagInfo('Passed', 3, [
- "essential-required",
- "successfully-tested",
- ]),
+ "essential-required",
+ "successfully-tested",
+ ]),
'-': FlagInfo('Unknown', 4, [
- "does-not-exist",
- "unknown",
- ]),
- }
+ "does-not-exist",
+ "unknown",
+ ]),
+}
+
-state2flg = dict([(y,x[0]) for x in flaginfo.iteritems() for y in x[1].states])
+state2flg = dict([(y, x[0]) for x in flaginfo.iteritems() for y in x[1].states])
def worst_flag(*flags):
try:
@@ -127,6 +128,7 @@ def worst_flag(*flags):
return(flag)
+
def get_flag(state):
try:
flag = state2flg[state]
@@ -135,6 +137,7 @@ def get_flag(state):
return(flag)
+
def new_summary():
cdate_array = datetime.datetime.utcnow().ctime().split()
utcdate = " ".join(cdate_array[:-1] + ["UTC"] + [cdate_array[-1]])
@@ -144,19 +147,20 @@ def new_summary():
pkgstruct = defaultdict(lambda: defaultdict(lambda: dfltentry))
return({
- "_id" : SUMMID,
- "_version" : SUMMVER,
- "_date" : utcdate,
- "_comment" : "Debian Piuparts Package Results - " \
- "https://anonscm.debian.org/cgit/piuparts/" \
+ "_id": SUMMID,
+ "_version": SUMMVER,
+ "_date": utcdate,
+ "_comment": "Debian Piuparts Package Results - "
+ "https://anonscm.debian.org/cgit/piuparts/"
"piuparts.git/tree/piupartslib/pkgsummary.py",
- "_type" : "source",
- "packages" : pkgstruct,
- })
+ "_type": "source",
+ "packages": pkgstruct,
+ })
+
def add_summary(summary, rep_sec, pkg, flag, block_cnt, url):
if not flag in flaginfo or not isinstance(block_cnt, int) \
- or not url.startswith('http'):
+ or not url.startswith('http'):
raise SummaryException("Invalid summary argument")
pdict = summary["packages"]
@@ -170,6 +174,7 @@ def add_summary(summary, rep_sec, pkg, flag, block_cnt, url):
return summary
+
def merge_summary(gbl_summ, sec_summ):
spdict = sec_summ["packages"]
@@ -181,6 +186,7 @@ def merge_summary(gbl_summ, sec_summ):
return gbl_summ
+
def tooltip(summary, pkg):
"""Returns e.g. "Failed in testing and stable, blocking 5 packages"."""
@@ -210,10 +216,12 @@ def tooltip(summary, pkg):
return tip
+
def write_summary(summary, fname):
with open(fname, 'w') as fl:
json.dump(summary, fl, sort_keys=True, indent=1)
+
def read_summary(fname):
with open(fname, 'r') as fl:
result = json.load(fl)
@@ -229,7 +237,6 @@ if __name__ == '__main__':
# read a global summary file and return DDPO info by package
summary = read_summary(sys.argv[1])
-
for pkg in summary['packages']:
flag, blocked, url = summary['packages'][pkg][DEFSEC]
diff --git a/tests/test_config.py b/tests/test_config.py
index e68d8ea..bfbe889 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -3,6 +3,7 @@ import unittest
import piupartslib.conf as conf
import distro_info
+
class ConfStdDistroTests(unittest.TestCase):
def setUp(self):
@@ -15,7 +16,6 @@ class ConfStdDistroTests(unittest.TestCase):
self.testing = debdist.testing()
self.experimental = 'experimental'
-
def testConfStdDistroNames(self):
self.assertEqual(self.oldstable, 'squeeze')
self.assertEqual(self.stable, 'wheezy')
@@ -42,25 +42,25 @@ class ConfStdDistroTests(unittest.TestCase):
def testConfMapProposedDistro(self):
self.assertEqual(
- self.cobj._map_distro('stable-proposed'), 'stable')
+ self.cobj._map_distro('stable-proposed'), 'stable')
self.assertEqual(
- self.cobj._map_distro(self.stable + '-proposed'), 'stable')
+ self.cobj._map_distro(self.stable + '-proposed'), 'stable')
def testConfMapRemainingDistros(self):
self.assertEqual(self.cobj._map_distro('rc-buggy'), 'experimental')
self.assertEqual(
- self.cobj._map_distro('Debian6.0.9'),
+ self.cobj._map_distro('Debian6.0.9'),
self.cobj._map_distro('squeeze'))
self.assertEqual(
- self.cobj._map_distro('Debian7.4'),
+ self.cobj._map_distro('Debian7.4'),
self.cobj._map_distro('wheezy'))
self.assertEqual(
- self.cobj._map_distro('Debian8'),
+ self.cobj._map_distro('Debian8'),
self.cobj._map_distro('jessie'))
self.assertEqual(
- self.cobj._map_distro('Debian8.1'),
+ self.cobj._map_distro('Debian8.1'),
self.cobj._map_distro('jessie'))
def testConfGetStdDistro(self):
@@ -75,11 +75,11 @@ class ConfStdDistroTests(unittest.TestCase):
self.cobj.get_std_distro(['unknown', self.__dict__[std]]), std)
self.assertEqual(
self.cobj.get_std_distro(
- ['unknown', 'unknown', self.__dict__[std]]), std)
+ ['unknown', 'unknown', self.__dict__[std]]), std)
self.assertEqual(
self.cobj.get_std_distro(
- [self.__dict__[std], 'unknown', 'unknown']), std)
+ [self.__dict__[std], 'unknown', 'unknown']), std)
self.assertEqual(self.cobj.get_std_distro(['unknown']), 'unknown')
self.assertEqual(
- self.cobj.get_std_distro(['unknown', 'unknown']), 'unknown')
+ self.cobj.get_std_distro(['unknown', 'unknown']), 'unknown')
diff --git a/tests/test_pkgsummary.py b/tests/test_pkgsummary.py
index eb53e9f..6587ab7 100644
--- a/tests/test_pkgsummary.py
+++ b/tests/test_pkgsummary.py
@@ -7,6 +7,7 @@ import json
import piupartslib.pkgsummary as pkgsummary
+
class PkgSummaryTests(unittest.TestCase):
def testSummFlaginfoStateDups(self):
@@ -18,27 +19,26 @@ class PkgSummaryTests(unittest.TestCase):
self.assertEqual(states, nodups)
def testSummGetFlag(self):
- self.assertEqual( 'F', pkgsummary.get_flag('failed-testing'))
- self.assertEqual( 'X', pkgsummary.get_flag('dependency-does-not-exist'))
- self.assertEqual( 'P', pkgsummary.get_flag('successfully-tested'))
- self.assertEqual( 'W', pkgsummary.get_flag('waiting-to-be-tested'))
+ self.assertEqual('F', pkgsummary.get_flag('failed-testing'))
+ self.assertEqual('X', pkgsummary.get_flag('dependency-does-not-exist'))
+ self.assertEqual('P', pkgsummary.get_flag('successfully-tested'))
+ self.assertEqual('W', pkgsummary.get_flag('waiting-to-be-tested'))
with self.assertRaises(pkgsummary.SummaryException):
pkgsummary.get_flag('bogus-state')
def testSummWorstFlag(self):
- self.assertEqual( 'F', pkgsummary.worst_flag('F'))
- self.assertEqual( 'P', pkgsummary.worst_flag('P'))
- self.assertEqual( 'F', pkgsummary.worst_flag('P', 'F'))
- self.assertEqual( 'F', pkgsummary.worst_flag('F', 'F'))
- self.assertEqual( 'W', pkgsummary.worst_flag('W', 'P'))
- self.assertEqual( 'F', pkgsummary.worst_flag('W', 'P', 'F', 'X', '-'))
+ self.assertEqual('F', pkgsummary.worst_flag('F'))
+ self.assertEqual('P', pkgsummary.worst_flag('P'))
+ self.assertEqual('F', pkgsummary.worst_flag('P', 'F'))
+ self.assertEqual('F', pkgsummary.worst_flag('F', 'F'))
+ self.assertEqual('W', pkgsummary.worst_flag('W', 'P'))
+ self.assertEqual('F', pkgsummary.worst_flag('W', 'P', 'F', 'X', '-'))
with self.assertRaises(pkgsummary.SummaryException):
pkgsummary.worst_flag('Z')
-
class PkgSummaryAddTests(unittest.TestCase):
def setUp(self):
@@ -56,34 +56,34 @@ class PkgSummaryAddTests(unittest.TestCase):
def testSummAddArgValidation(self):
with self.assertRaises(pkgsummary.SummaryException):
pkgsummary.add_summary(
- self.summ, 'foodist', 'foopkg', 'Z', 0, 'http://foo')
+ self.summ, 'foodist', 'foopkg', 'Z', 0, 'http://foo')
with self.assertRaises(pkgsummary.SummaryException):
pkgsummary.add_summary(
- self.summ, 'foodist', 'foopkg', 'X', 'bogus',
+ self.summ, 'foodist', 'foopkg', 'X', 'bogus',
'http://foo')
with self.assertRaises(pkgsummary.SummaryException):
pkgsummary.add_summary(
- self.summ, 'foodist', 'foopkg', 'X', 1, 'ittp://foo')
+ self.summ, 'foodist', 'foopkg', 'X', 1, 'ittp://foo')
pkgsummary.add_summary(
- self.summ, 'foodist', 'foopkg', 'X', 1, 'http://foo')
+ self.summ, 'foodist', 'foopkg', 'X', 1, 'http://foo')
def testSummAddArgStorageFormat(self):
# store non-overlapping entries
pkgsummary.add_summary(self.summ, 'dist', 'pkg', 'X', 0, 'http://foo')
pkgsummary.add_summary(
- self.summ, 'dist', 'pkg2', 'W', 1, 'http://foo2')
+ self.summ, 'dist', 'pkg2', 'W', 1, 'http://foo2')
pkgsummary.add_summary(
- self.summ, 'dist2', 'pkg3', 'P', 2, 'http://foo3')
+ self.summ, 'dist2', 'pkg3', 'P', 2, 'http://foo3')
self.assertEqual(
- ['X', 0, 'http://foo'],
+ ['X', 0, 'http://foo'],
self.summ['packages']['pkg']['dist'])
self.assertEqual(
- ['W', 1, 'http://foo2'],
+ ['W', 1, 'http://foo2'],
self.summ['packages']['pkg2']['dist'])
self.assertEqual(
- ['P', 2, 'http://foo3'],
+ ['P', 2, 'http://foo3'],
self.summ['packages']['pkg3']['dist2'])
def testSummAddOverwriteFlag(self):
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/piuparts/piuparts.git
More information about the Piuparts-commits
mailing list