[Piuparts-commits] [SCM] piuparts git repository branch, bikeshed, updated. 0.52-19-g9617a30
Andreas Beckmann
anbe at debian.org
Tue May 28 12:53:22 UTC 2013
The following commit has been merged in the bikeshed branch:
commit f959c6c35b36e104e0ea8d57abbabc3b46cf73e4
Author: Andreas Beckmann <anbe at debian.org>
Date: Tue May 28 12:17:38 2013 +0200
*.py: move a bit towards pep8
Signed-off-by: Andreas Beckmann <anbe at debian.org>
diff --git a/master-bin/detect_well_known_errors.py b/master-bin/detect_well_known_errors.py
index 1080cde..a5a7d57 100755
--- a/master-bin/detect_well_known_errors.py
+++ b/master-bin/detect_well_known_errors.py
@@ -31,7 +31,7 @@ from piupartslib.conf import MissingSection
CONFIG_FILE = "/etc/piuparts/piuparts.conf"
DISTRO_CONFIG_FILE = "/etc/piuparts/distros.conf"
-KPR_DIRS = ( 'pass', 'bugged', 'affected', 'fail' )
+KPR_DIRS = ('pass', 'bugged', 'affected', 'fail')
KPR_EXT = '.kpr'
BUG_EXT = '.bug'
@@ -71,34 +71,34 @@ PKG_ERROR_TPL = \
$BUG</li>
"""
-class WKE_Config( piupartslib.conf.Config ):
+class WKE_Config(piupartslib.conf.Config):
"""Configuration parameters for Well Known Errors"""
- def __init__( self ):
+ def __init__(self):
self.section = 'global'
- piupartslib.conf.Config.__init__( self, self.section,
+ piupartslib.conf.Config.__init__(self, self.section,
{
"sections": "report",
"output-directory": "html",
"master-directory": ".",
"known-problem-directory": "@sharedir@/piuparts/known_problems",
"proxy": None,
- }, "" )
+ }, "")
-class WKE_Section_Config( piupartslib.conf.Config ):
+class WKE_Section_Config(piupartslib.conf.Config):
- def __init__( self, section ):
+ def __init__(self, section):
self.section = section
- piupartslib.conf.Config.__init__( self, self.section,
+ piupartslib.conf.Config.__init__(self, self.section,
{
"mirror": None,
"distro": None,
"area": None,
"arch": None,
"upgrade-test-distros": None,
- }, "", defaults_section="global" )
+ }, "", defaults_section="global")
class Problem():
""" Encapsulate a particular known problem """
@@ -108,12 +108,12 @@ class Problem():
self.probpath = probpath
self.name = os.path.basename(probpath)
- self.short_name = os.path.splitext( self.name )[0]
+ self.short_name = os.path.splitext(self.name)[0]
self.tags_are_valid = True
- self.required_tags = [ "PATTERN", "WHERE", "ISSUE",
- "HEADER", "HELPTEXT"]
+ self.required_tags = ["PATTERN", "WHERE", "ISSUE",
+ "HEADER", "HELPTEXT"]
self.optional_tags = ["EXCLUDE_PATTERN", "EXPLAIN", "PRIORITY"]
@@ -124,12 +124,12 @@ class Problem():
self.tags_are_valid = False
if "PATTERN" in self.__dict__:
- self.inc_re = re.compile( self.PATTERN )
+ self.inc_re = re.compile(self.PATTERN)
else:
self.inc_re = None
if "EXCLUDE_PATTERN" in self.__dict__:
- self.exc_re = re.compile( self.EXCLUDE_PATTERN )
+ self.exc_re = re.compile(self.EXCLUDE_PATTERN)
else:
self.exc_re = None
@@ -139,21 +139,21 @@ class Problem():
def init_problem(self):
"""Load problem file parameters (HELPTEXT="foo" -> self.HELPTEXT)"""
- pb = open( self.probpath, 'r' )
+ pb = open(self.probpath, 'r')
probbody = pb.read()
pb.close()
- tagged = re.sub( "^([A-Z]+=)", "<hdr>\g<0>", probbody, 0, re.MULTILINE)
+ tagged = re.sub("^([A-Z]+=)", "<hdr>\g<0>", probbody, 0, re.MULTILINE)
- for chub in re.split( '<hdr>', tagged )[1:]:
+ for chub in re.split('<hdr>', tagged)[1:]:
- (name,value) = re.split( "=", chub, 1, re.MULTILINE )
+ (name, value) = re.split("=", chub, 1, re.MULTILINE)
while value[-1] == '\n':
value = value[:-1]
- if re.search( "^\'.+\'$", value, re.MULTILINE|re.DOTALL ) \
- or re.search( '^\".+\"$', value, re.MULTILINE|re.DOTALL ):
+ if re.search("^\'.+\'$", value, re.MULTILINE|re.DOTALL) \
+ or re.search('^\".+\"$', value, re.MULTILINE|re.DOTALL):
value = value[1:-1]
if name in self.required_tags or name in self.optional_tags:
@@ -167,14 +167,14 @@ class Problem():
"""Does the log text 'logbody' contain this known problem?"""
if where in self.WHERE:
- if self.inc_re.search( logbody, re.MULTILINE ):
+ if self.inc_re.search(logbody, re.MULTILINE):
for line in logbody.splitlines():
- if self.inc_re.search( line ):
+ if self.inc_re.search(line):
if self.exc_re == None \
or not self.exc_re.search(line):
- return( True )
+ return True
- return( False )
+ return False
def get_command(self):
@@ -183,7 +183,7 @@ class Problem():
if "EXCLUDE_PATTERN" in self.__dict__:
cmd += " | grep -v -E \"%s\"" % self.EXCLUDE_PATTERN
- return(cmd)
+ return cmd
class FailureManager():
"""Class to track known failures encountered, by package,
@@ -203,30 +203,30 @@ class FailureManager():
for pkgspec in self.logdict:
logpath = self.logdict[pkgspec]
try:
- kp = open( get_kpr_path(logpath), 'r' )
+ kp = open(get_kpr_path(logpath), 'r')
for line in kp.readlines():
- (where, problem) = self.parse_kpr_line( line )
+ (where, problem) = self.parse_kpr_line(line)
- self.failures.append( make_failure(where, problem, pkgspec) )
+ self.failures.append(make_failure(where, problem, pkgspec))
kp.close()
except IOError:
print "Error processing %s" % get_kpr_path(logpath)
- def parse_kpr_line( self, line ):
+ def parse_kpr_line(self, line):
"""Parse a line in a kpr file into where (e.g. 'pass') and problem name"""
- m = re.search( "^([a-z]+)/.+ (.+)$", line )
- return( m.group(1), m.group(2) )
+ m = re.search("^([a-z]+)/.+ (.+)$", line)
+ return (m.group(1), m.group(2))
- def sort_by_path( self ):
+ def sort_by_path(self):
self.failures.sort(key=lambda x: self.logdict[x.pkgspec])
- def sort_by_bugged_and_rdeps( self, pkgsdb ):
+ def sort_by_bugged_and_rdeps(self, pkgsdb):
self.pkgsdb = pkgsdb
- def keyfunc( x, pkgsdb=self.pkgsdb, logdict=self.logdict):
+ def keyfunc(x, pkgsdb=self.pkgsdb, logdict=self.logdict):
pkg_obj = pkgsdb.get_package(get_pkg(x.pkgspec))
@@ -237,43 +237,43 @@ class FailureManager():
is_failed = get_where(logdict[x.pkgspec]) == "fail"
- return( (not is_failed, -rdeps, logdict[x.pkgspec]) )
+ return (not is_failed, -rdeps, logdict[x.pkgspec])
- self.failures.sort( key=keyfunc )
+ self.failures.sort(key=keyfunc)
- def filtered( self, problem ):
- return([x for x in self.failures if problem==x.problem])
+ def filtered(self, problem):
+ return [x for x in self.failures if problem==x.problem]
-def make_failure( where, problem, pkgspec ):
- return(namedtuple('Failure', 'where problem pkgspec')(where, problem, pkgspec))
+def make_failure(where, problem, pkgspec):
+ return (namedtuple('Failure', 'where problem pkgspec')(where, problem, pkgspec))
-def get_where( logpath ):
+def get_where(logpath):
"""Convert a path to a log file to the 'where' component (e.g. 'pass')"""
- return( logpath.split('/')[-2] )
+ return logpath.split('/')[-2]
-def replace_ext( fpath, newext ):
- basename = os.path.splitext( os.path.split(fpath)[1] )[0]
- return('/'.join( fpath.split('/')[:-1] + [basename + newext] ))
+def replace_ext(fpath, newext):
+ basename = os.path.splitext(os.path.split(fpath)[1])[0]
+ return '/'.join(fpath.split('/')[:-1] + [basename + newext])
-def get_pkg( pkgspec ):
- return( pkgspec.split('_')[0] )
+def get_pkg(pkgspec):
+ return pkgspec.split('_')[0]
-def get_kpr_path( logpath ):
+def get_kpr_path(logpath):
"""Return the kpr file path for a particular log path"""
- return( replace_ext( logpath, KPR_EXT ) )
+ return replace_ext(logpath, KPR_EXT)
-def pts_subdir( source ):
+def pts_subdir(source):
if source[:3] == "lib":
return source[:4]
else:
return source[:1]
-def source_pkg( pkgspec, db ):
+def source_pkg(pkgspec, db):
source_name = db.get_control_header(get_pkg(pkgspec), "Source")
- return( source_name )
+ return source_name
-def get_file_dict( workdirs, ext ):
+def get_file_dict(workdirs, ext):
"""For files in [workdirs] with extension 'ext', create a dict of
<pkgname>_<version>: <path>"""
@@ -283,34 +283,34 @@ def get_file_dict( workdirs, ext ):
for fl in os.listdir(dir):
if os.path.splitext(fl)[1] == ext:
filedict[os.path.splitext(os.path.basename(fl))[0]] \
- = os.path.join(dir,fl)
+ = os.path.join(dir, fl)
return filedict
-def get_pkgspec( logpath ):
+def get_pkgspec(logpath):
"""For a log full file spec, return the pkgspec (<pkg>_<version)"""
- return( logpath.split('/')[-1] )
+ return logpath.split('/')[-1]
def get_bug_text(logpath):
bugpath = replace_ext(logpath, BUG_EXT)
txt = ""
if os.path.exists(bugpath):
- bf = open( bugpath, 'r' )
+ bf = open(bugpath, 'r')
txt = bf.read()
bf.close()
return txt
-def section_path( logpath ):
+def section_path(logpath):
"""Convert a full log path name to one relative to the section directory"""
- return( '/'.join( [get_where(logpath), get_pkgspec(logpath)] ) )
+ return '/'.join([get_where(logpath), get_pkgspec(logpath)])
-def mtime( path ):
+def mtime(path):
return os.path.getmtime(path)
-def clean_cache_files( logdict, cachedict, recheck=False, recheck_failed=False,
- skipnewer=False ):
+def clean_cache_files(logdict, cachedict, recheck=False, recheck_failed=False,
+ skipnewer=False):
"""Delete files in cachedict if the corresponding logdict file is missing
or newer"""
@@ -332,9 +332,9 @@ def clean_cache_files( logdict, cachedict, recheck=False, recheck_failed=False,
# logfile may have disappeared
pass
- return( count )
+ return count
-def make_kprs( logdict, kprdict, problem_list ):
+def make_kprs(logdict, kprdict, problem_list):
"""Create kpr files, as necessary, so every log file has one
kpr entries are e.g.
fail/xorg-docs_1:1.6-1.log broken_symlinks_error.conf"""
@@ -345,32 +345,32 @@ def make_kprs( logdict, kprdict, problem_list ):
logpath = logdict[pkg_spec]
try:
- lb = open( logpath, 'r' )
+ lb = open(logpath, 'r')
logbody = lb.read()
lb.close()
- where = get_where( logpath )
+ where = get_where(logpath)
- kf = open( get_kpr_path(logpath), 'a')
+ kf = open(get_kpr_path(logpath), 'a')
for problem in problem_list:
- if( problem.has_problem( logbody, where ) ):
- kf.write( "%s/%s.log %s\n" % (where, pkg_spec, problem.name) )
+ if problem.has_problem(logbody, where):
+ kf.write("%s/%s.log %s\n" % (where, pkg_spec, problem.name))
kf.close()
except IOError:
print "File error processing %s" % logpath
- return( len(needs_kpr) )
+ return len(needs_kpr)
-def populate_tpl( tmpl, vals ):
+def populate_tpl(tmpl, vals):
for key in vals:
- tmpl = re.sub( "\$%s" % key, str(vals[key]), tmpl )
+ tmpl = re.sub("\$%s" % key, str(vals[key]), tmpl)
return tmpl
-def update_tpl( basedir, section, problem, failures, logdict, ftpl, ptpl, pkgsdb ):
+def update_tpl(basedir, section, problem, failures, logdict, ftpl, ptpl, pkgsdb):
pkg_text = ""
bugged_section = False
@@ -399,42 +399,42 @@ def update_tpl( basedir, section, problem, failures, logdict, ftpl, ptpl, pkgsdb
'RDEPS': rdep_cnt,
'SDIR':pts_subdir(src_pkg),
'SPKG':src_pkg,
- } )
+ })
if len(pkg_text):
- pf = open(os.path.join(basedir, failures[0].problem[:-5] + TPL_EXT),'w')
- tpl_text = populate_tpl( ptpl, {
+ pf = open(os.path.join(basedir, failures[0].problem[:-5] + TPL_EXT), 'w')
+ tpl_text = populate_tpl(ptpl, {
'HEADER': problem.HEADER,
'SECTION': section,
'HELPTEXT': problem.HELPTEXT,
'COMMAND': problem.get_command(),
'PACKAGE_LIST': pkg_text,
'COUNT': len(failures),
- } )
+ })
- pf.write( tpl_text )
+ pf.write(tpl_text)
pf.close()
-def update_html( section, logdict, problem_list, failures, config, pkgsdb ):
+def update_html(section, logdict, problem_list, failures, config, pkgsdb):
- html_dir = os.path.join( config['output-directory'], section )
- if not os.path.exists( html_dir ):
+ html_dir = os.path.join(config['output-directory'], section)
+ if not os.path.exists(html_dir):
os.makedirs(html_dir)
for problem in problem_list:
- update_tpl( html_dir, section, problem,
- failures.filtered(problem.name),
- logdict,
- PKG_ERROR_TPL, PROB_TPL, pkgsdb )
+ update_tpl(html_dir, section, problem,
+ failures.filtered(problem.name),
+ logdict,
+ PKG_ERROR_TPL, PROB_TPL, pkgsdb)
# Make a failure list of all failed packages that don't show up as known
failedpkgs = set([x for x in logdict.keys()
if get_where(logdict[x]) != 'pass'])
knownfailpkgs = set([failure.pkgspec for failure in failures.failures])
- unknownsasfailures = [make_failure("","unknown_failures.conf",x)
+ unknownsasfailures = [make_failure("", "unknown_failures.conf", x)
for x in failedpkgs.difference(knownfailpkgs)]
- def keyfunc( x, pkgsdb=pkgsdb, logdict=logdict):
+ def keyfunc(x, pkgsdb=pkgsdb, logdict=logdict):
pkg_obj = pkgsdb.get_package(get_pkg(x.pkgspec))
@@ -445,39 +445,39 @@ def update_html( section, logdict, problem_list, failures, config, pkgsdb ):
is_failed = get_where(logdict[x.pkgspec]) == "fail"
- return( (not is_failed, -rdeps, logdict[x.pkgspec]) )
+ return (not is_failed, -rdeps, logdict[x.pkgspec])
- unknownsasfailures.sort( key=keyfunc )
+ unknownsasfailures.sort(key=keyfunc)
- update_tpl( html_dir, section, problem_list[0], unknownsasfailures,
- logdict,
- PKG_ERROR_TPL, UNKNOWN_TPL, pkgsdb )
+ update_tpl(html_dir, section, problem_list[0], unknownsasfailures,
+ logdict,
+ PKG_ERROR_TPL, UNKNOWN_TPL, pkgsdb)
-def process_section( section, config, problem_list,
- recheck=False, recheck_failed=False, pkgsdb=None ):
+def process_section(section, config, problem_list,
+ recheck=False, recheck_failed=False, pkgsdb=None):
""" Update .bug and .kpr files for logs in this section """
# raises MissingSection if the section does not exist in piuparts.conf
- section_config = WKE_Section_Config( section )
- section_config.read( CONFIG_FILE )
+ section_config = WKE_Section_Config(section)
+ section_config.read(CONFIG_FILE)
- sectiondir = os.path.join( config['master-directory'], section )
- workdirs = [ os.path.join(sectiondir,x) for x in KPR_DIRS ]
+ sectiondir = os.path.join(config['master-directory'], section)
+ workdirs = [os.path.join(sectiondir, x) for x in KPR_DIRS]
- if not os.access( sectiondir, os.F_OK ):
- raise MissingSection( "", section )
+ if not os.access(sectiondir, os.F_OK):
+ raise MissingSection("", section)
[os.mkdir(x) for x in workdirs if not os.path.exists(x)]
- (logdict, kprdict, bugdict) = [ get_file_dict(workdirs, x ) \
- for x in [LOG_EXT, KPR_EXT, BUG_EXT] ]
+ (logdict, kprdict, bugdict) = [get_file_dict(workdirs, x)
+ for x in [LOG_EXT, KPR_EXT, BUG_EXT]]
- del_cnt = clean_cache_files( logdict, kprdict, recheck, recheck_failed )
- clean_cache_files( logdict, bugdict, skipnewer=True )
+ del_cnt = clean_cache_files(logdict, kprdict, recheck, recheck_failed)
+ clean_cache_files(logdict, bugdict, skipnewer=True)
- (kprdict, bugdict) = [get_file_dict(workdirs,x) for x in [KPR_EXT, BUG_EXT]]
+ (kprdict, bugdict) = [get_file_dict(workdirs, x) for x in [KPR_EXT, BUG_EXT]]
- add_cnt = make_kprs( logdict, kprdict, problem_list )
+ add_cnt = make_kprs(logdict, kprdict, problem_list)
if not pkgsdb:
distro_config = piupartslib.conf.DistroConfig(
@@ -489,7 +489,7 @@ def process_section( section, config, problem_list,
pkgs_url = distro_config.get_packages_url(
section_config.get_distro(),
section_config.get_area(),
- section_config.get_arch() )
+ section_config.get_arch())
pkg_fl = piupartslib.open_packages_url(pkgs_url)
pkgsdb.read_packages_file(pkg_fl)
pkg_fl.close()
@@ -497,23 +497,23 @@ def process_section( section, config, problem_list,
pkgsdb.compute_package_states()
pkgsdb.calc_rrdep_counts()
- failures = FailureManager( logdict )
+ failures = FailureManager(logdict)
failures.sort_by_bugged_and_rdeps(pkgsdb)
- update_html( section, logdict, problem_list, failures, config, pkgsdb )
+ update_html(section, logdict, problem_list, failures, config, pkgsdb)
- return( del_cnt, add_cnt, failures )
+ return (del_cnt, add_cnt, failures)
-def detect_well_known_errors( config, problem_list, recheck, recheck_failed ):
+def detect_well_known_errors(config, problem_list, recheck, recheck_failed):
for section in config['sections'].split():
try:
- print time.strftime( "%a %b %2d %H:%M:%S %Z %Y", time.localtime() )
+ print time.strftime("%a %b %2d %H:%M:%S %Z %Y", time.localtime())
print "%s:" % section
- ( del_cnt, add_cnt, failures ) = \
- process_section( section, config, problem_list,
- recheck, recheck_failed )
+ (del_cnt, add_cnt, failures) = \
+ process_section(section, config, problem_list,
+ recheck, recheck_failed)
print "parsed logfiles: %d removed, %d added" % (del_cnt, add_cnt)
@@ -524,14 +524,14 @@ def detect_well_known_errors( config, problem_list, recheck, recheck_failed ):
except MissingSection:
pass
- print time.strftime( "%a %b %2d %H:%M:%S %Z %Y", time.localtime() )
+ print time.strftime("%a %b %2d %H:%M:%S %Z %Y", time.localtime())
-def create_problem_list( pdir ):
+def create_problem_list(pdir):
plist = []
for pfile in [x for x in sorted(os.listdir(pdir)) if x.endswith(".conf")]:
- prob = Problem(os.path.join(pdir,pfile))
+ prob = Problem(os.path.join(pdir, pfile))
if prob.valid():
plist.append(prob)
@@ -549,25 +549,25 @@ This script processes all log files against defined "known_problem" files,
caching the problems found, by package, into ".kpr" files. The cached data
is summarized into html ".tpl" files in <html_dir>/<section>, which are then
incorporated by piuparts-report into the final web reports.
-""" )
+""")
- parser.add_argument( '--recheck', dest='recheck', action='store_true',
- help="recheck all log files (delete cache)" )
+ parser.add_argument('--recheck', dest='recheck', action='store_true',
+ help="recheck all log files (delete cache)")
- parser.add_argument( '--recheck-failed', dest='recheck_failed',
+ parser.add_argument('--recheck-failed', dest='recheck_failed',
action='store_true',
- help="recheck failed log files (delete cache)" )
+ help="recheck failed log files (delete cache)")
args = parser.parse_args()
conf = WKE_Config()
- conf.read( CONFIG_FILE )
+ conf.read(CONFIG_FILE)
if conf["proxy"]:
os.environ["http_proxy"] = conf["proxy"]
- problem_list = create_problem_list( conf['known-problem-directory'] )
+ problem_list = create_problem_list(conf['known-problem-directory'])
- detect_well_known_errors( conf, problem_list, args.recheck,
- args.recheck_failed )
+ detect_well_known_errors(conf, problem_list, args.recheck,
+ args.recheck_failed)
# vi:set et ts=4 sw=4 :
diff --git a/piuparts-analyze.py b/piuparts-analyze.py
index 9fdd570..7f63e8f 100644
--- a/piuparts-analyze.py
+++ b/piuparts-analyze.py
@@ -69,11 +69,11 @@ def package_version(log):
def package_source_version(log):
version = package_version(log)
- possible_binnmu_part = version.rsplit('+',1)[-1]
+ possible_binnmu_part = version.rsplit('+', 1)[-1]
if possible_binnmu_part.startswith('b') and possible_binnmu_part[1:].isdigit():
# the package version contains a binnmu-part which is not part of the source version
# and therefore not accepted/tracked by the bts. Remove it.
- version = version.rsplit('+',1)[0]
+ version = version.rsplit('+', 1)[0]
return version
diff --git a/piuparts-master-backend.py b/piuparts-master-backend.py
index d11a60f..1e83ec4 100644
--- a/piuparts-master-backend.py
+++ b/piuparts-master-backend.py
@@ -274,7 +274,7 @@ class Master(Protocol):
def dump_pkgs(self):
for st in self._binary_db.get_states():
for name in self._binary_db.get_pkg_names_in_state(st):
- logging.debug("%s : %s\n" % (st,name))
+ logging.debug("%s : %s\n" % (st, name))
def _switch_section(self, command, args):
self._check_args(1, command, args)
diff --git a/piuparts-report.py b/piuparts-report.py
index 65e4416..86f7c71 100644
--- a/piuparts-report.py
+++ b/piuparts-report.py
@@ -572,7 +572,7 @@ def read_file(filename):
f.close()
return l
-def create_section_navigation(section_names,current_section, doc_root):
+def create_section_navigation(section_names, current_section, doc_root):
tablerows = ""
for section in section_names:
tablerows += ("<tr class=\"normalrow\"><td class=\"contentcell\"><a href='%s/%s'>%s</a></td></tr>\n") % \
@@ -702,7 +702,7 @@ class Section:
htmlpage = string.Template(HTML_HEADER + LOG_LIST_BODY_TEMPLATE + HTML_FOOTER)
f = file(filename, "w")
- f.write(htmlpage.safe_substitute( {
+ f.write(htmlpage.safe_substitute({
"page_title": html_protect(title+" in "+self._config.section),
"section_navigation":
create_section_navigation(self._section_names,
@@ -776,8 +776,8 @@ class Section:
def link_to_maintainer_summary(self, maintainer):
email = get_email_address(maintainer)
return "<a href=\"%s/%s/maintainer/%s/%s.html\">%s</a>" \
- % (self._doc_root,self._config.section,maintainer_subdir(email),
- email,html_protect(maintainer))
+ % (self._doc_root, self._config.section, maintainer_subdir(email),
+ email, html_protect(maintainer))
def link_to_uploaders(self, uploaders):
link = ""
@@ -918,8 +918,8 @@ class Section:
htmlpage = string.Template(HTML_HEADER + MAINTAINER_BODY_TEMPLATE + HTML_FOOTER)
filename = os.path.join(maintainer_subdir_path, maintainer + ".html")
f = file(filename, "w")
- f.write(htmlpage.safe_substitute( {
- "page_title": html_protect( "Status of " \
+ f.write(htmlpage.safe_substitute({
+ "page_title": html_protect("Status of " \
+ maintainer \
+ " packages in " \
+ self._config.section),
@@ -972,8 +972,8 @@ class Section:
+ "<td class=\"%s\">piuparts-result:</td>" \
% state_style \
+ "<td class=\"contentcell2\">%s %s</td>" \
- % ( self.link_to_state_page(self._config.section,binary,state),
- self.links_to_logs(binary, state, logs_by_dir) ) \
+ % (self.link_to_state_page(self._config.section, binary, state),
+ self.links_to_logs(binary, state, logs_by_dir)) \
+ "<td class=\"labelcell\">Version:</td>" \
+ "<td class=\"contentcell2\">%s</td>" \
% html_protect(binary_version) \
@@ -999,7 +999,7 @@ class Section:
+ "<td class=\"labelcell\">Source:</td>" \
+ "<td class=\"contentcell2\">" \
+ "<a href=\"http://packages.qa.debian.org/%s\" target=\"_blank\">%s</a>" \
- % ( source, html_protect(source) ) \
+ % (source, html_protect(source)) \
+ "</td>" \
+ "<td class=\"labelcell\">piuparts summary:</td>" \
+ "<td class=\"contentcell2\">%s</td>" \
@@ -1022,7 +1022,7 @@ class Section:
% self.link_to_uploaders(uploaders) \
+ "</tr>\n"
- source_summary_page_path = os.path.join( self._output_directory,
+ source_summary_page_path = os.path.join(self._output_directory,
"source",
source_subdir(source))
@@ -1033,9 +1033,9 @@ class Section:
htmlpage = string.Template(HTML_HEADER + SOURCE_PACKAGE_BODY_TEMPLATE + HTML_FOOTER)
f = file(filename, "w")
- f.write(htmlpage.safe_substitute( {
+ f.write(htmlpage.safe_substitute({
"page_title": html_protect("Status of source package "+source+" in "+self._config.section),
- "section_navigation": create_section_navigation(self._section_names,self._config.section,self._doc_root),
+ "section_navigation": create_section_navigation(self._section_names, self._config.section, self._doc_root),
"time": time.strftime("%Y-%m-%d %H:%M %Z"),
"rows": sourcerows+binaryrows,
"doc_root": self._doc_root,
@@ -1111,7 +1111,7 @@ class Section:
return stats_html
- def create_and_link_to_analysises(self,state):
+ def create_and_link_to_analysises(self, state):
link="<ul>"
for template, linktarget in linktarget_by_template:
# successful logs only have issues and failed logs only have errors
@@ -1129,9 +1129,9 @@ class Section:
htmlpage = string.Template(HTML_HEADER + ANALYSIS_BODY_TEMPLATE + HTML_FOOTER)
filename = os.path.join(self._output_directory, template[:-len(".tpl")]+".html")
f = file(filename, "w")
- f.write(htmlpage.safe_substitute( {
+ f.write(htmlpage.safe_substitute({
"page_title": html_protect("Packages in state "+state+" "+linktarget),
- "section_navigation": create_section_navigation(self._section_names,self._config.section,self._doc_root),
+ "section_navigation": create_section_navigation(self._section_names, self._config.section, self._doc_root),
"time": time.strftime("%Y-%m-%d %H:%M %Z"),
"rows": rows,
"doc_root": self._doc_root,
@@ -1168,7 +1168,7 @@ class Section:
link = ""
return link
- def write_section_index_page(self,dirs,total_packages):
+ def write_section_index_page(self, dirs, total_packages):
tablerows = ""
for state in self._binary_db.get_active_states():
dir_link = ""
@@ -1211,9 +1211,9 @@ class Section:
description = self._config["description"][:-1] + " " + description
elif self._config["description"]:
description = self._config["description"]
- write_file(os.path.join(self._output_directory, "index.html"), htmlpage.safe_substitute( {
+ write_file(os.path.join(self._output_directory, "index.html"), htmlpage.safe_substitute({
"page_title": html_protect(self._config.section+" statistics"),
- "section_navigation": create_section_navigation(self._section_names,self._config.section,self._doc_root),
+ "section_navigation": create_section_navigation(self._section_names, self._config.section, self._doc_root),
"time": time.strftime("%Y-%m-%d %H:%M %Z"),
"section": html_protect(self._config.section),
"description": html_protect(description),
@@ -1246,15 +1246,15 @@ class Section:
def cmp_func(a, b):
"""Sort by block count first"""
- rrdep_cmp = cmp( a.block_count(), b.block_count())
+ rrdep_cmp = cmp(a.block_count(), b.block_count())
if rrdep_cmp != 0:
return -rrdep_cmp
else:
- return cmp( a["Package"], b["Package"] )
+ return cmp(a["Package"], b["Package"])
names = self._binary_db.get_pkg_names_in_state(state)
packages = [self._binary_db.get_package(name) for name in names]
- packages.sort( cmp_func )
+ packages.sort(cmp_func)
for package in packages:
vlist += "<li id=\"%s\">%s" % (
@@ -1269,23 +1269,23 @@ class Section:
for alternatives in all_deps:
dep = alternatives[0]
vlist += "<li>dependency %s is %s</li>\n" % \
- (self.link_to_state_page(self._config.section,dep,dep),
+ (self.link_to_state_page(self._config.section, dep, dep),
emphasize_reason(html_protect(self._binary_db.get_package_state(dep, resolve_virtual=False))))
vlist += self._show_providers(dep)
if len(alternatives) > 1:
vlist += "\n<ul>\n"
for dep in alternatives[1:]:
vlist += "<li>alternative dependency %s is %s</li>\n" % \
- (self.link_to_state_page(self._config.section,dep,dep),
+ (self.link_to_state_page(self._config.section, dep, dep),
emphasize_reason(html_protect(self._binary_db.get_package_state(dep, resolve_virtual=False))))
vlist += self._show_providers(dep)
vlist += "</ul>\n"
vlist += "</ul>\n"
vlist += "</li>\n"
htmlpage = string.Template(HTML_HEADER + STATE_BODY_TEMPLATE + HTML_FOOTER)
- write_file(os.path.join(self._output_directory, "state-%s.html" % state), htmlpage.safe_substitute( {
+ write_file(os.path.join(self._output_directory, "state-%s.html" % state), htmlpage.safe_substitute({
"page_title": html_protect("Packages in state "+state+" in "+self._config.section),
- "section_navigation": create_section_navigation(self._section_names,self._config.section,self._doc_root),
+ "section_navigation": create_section_navigation(self._section_names, self._config.section, self._doc_root),
"time": time.strftime("%Y-%m-%d %H:%M %Z"),
"state": html_protect(state),
"section": html_protect(self._config.section),
@@ -1392,12 +1392,12 @@ def main():
# static pages
logging.debug("Writing static pages")
for page in ("index", "bug_howto"):
- tpl = os.path.join(output_directory,page+".tpl")
+ tpl = os.path.join(output_directory, page+".tpl")
INDEX_BODY = "".join(read_file(tpl))
htmlpage = string.Template(HTML_HEADER + INDEX_BODY + HTML_FOOTER)
- write_file(os.path.join(output_directory,page+".html"), htmlpage.safe_substitute( {
+ write_file(os.path.join(output_directory, page+".html"), htmlpage.safe_substitute({
"page_title": "About piuparts.debian.org and News",
- "section_navigation": create_section_navigation(section_names,"sid",doc_root),
+ "section_navigation": create_section_navigation(section_names, "sid", doc_root),
"time": time.strftime("%Y-%m-%d %H:%M %Z"),
"doc_root": doc_root,
}))
diff --git a/piuparts-slave.py b/piuparts-slave.py
index ccc9ed7..a2f7af7 100644
--- a/piuparts-slave.py
+++ b/piuparts-slave.py
@@ -704,7 +704,7 @@ class Section:
if ret == 0:
output.write("Executing: %s\n" % " ".join(command))
- ret,f = run_test_with_timeout(command, MAX_WAIT_TEST_RUN)
+ ret, f = run_test_with_timeout(command, MAX_WAIT_TEST_RUN)
if not f or f[-1] != '\n':
f += '\n'
output.write(f)
@@ -783,7 +783,7 @@ def run_test_with_timeout(cmd, maxwait, kill_all=True):
alarm(0)
except Alarm:
terminate_subprocess(p, kill_all)
- return -1,stdout
+ return -1, stdout
except KeyboardInterrupt:
print '\nSlave interrupted by the user, cleaning up...'
try:
@@ -797,7 +797,7 @@ def run_test_with_timeout(cmd, maxwait, kill_all=True):
if ret in [124, 137]:
# process was terminated by the timeout command
ret = -ret
- return ret,stdout
+ return ret, stdout
def create_chroot(config, tarball, distro):
diff --git a/piuparts.py b/piuparts.py
index b705102..e69e8a6 100644
--- a/piuparts.py
+++ b/piuparts.py
@@ -452,7 +452,7 @@ def run(command, ignore_errors=False, timeout=0):
env = os.environ.copy()
env["LC_ALL"] = "C"
env["LANGUAGES"] = ""
- env["PIUPARTS_OBJECTS"] = ' '.join(str(vobject) for vobject in settings.testobjects )
+ env["PIUPARTS_OBJECTS"] = ' '.join(str(vobject) for vobject in settings.testobjects)
devnull = open('/dev/null', 'r')
p = subprocess.Popen(command, env=env, stdin=devnull,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
@@ -1510,10 +1510,10 @@ class Chroot:
for p in packages:
basename = p + ".list"
- if not os.path.exists(os.path.join(vdir,basename)):
+ if not os.path.exists(os.path.join(vdir, basename)):
continue
- f = file(os.path.join(vdir,basename), "r")
+ f = file(os.path.join(vdir, basename), "r")
for line in f:
pathname = line.strip()
if pathname.startswith("/etc/cron."):
@@ -1557,10 +1557,10 @@ class Chroot:
for p in packages:
basename = p + ".list"
- if not os.path.exists(os.path.join(vdir,basename)):
+ if not os.path.exists(os.path.join(vdir, basename)):
continue
- f = file(os.path.join(vdir,basename), "r")
+ f = file(os.path.join(vdir, basename), "r")
for line in f:
pathname = line.strip()
if pathname.startswith("/etc/logrotate.d/"):
@@ -1629,7 +1629,7 @@ class VirtServ(Chroot):
def _awaitok(self, cmd):
r = self._vs.stdout.readline().rstrip('\n')
l = r.split(' ')
- if l[0] != 'ok': self._fail('virtserver response to %s: %s' % (cmd,r))
+ if l[0] != 'ok': self._fail('virtserver response to %s: %s' % (cmd, r))
logging.debug('adt-virt << %s', r)
return l[1:]
@@ -1639,7 +1639,7 @@ class VirtServ(Chroot):
if type(a) != type(()): return a
(a,) = a
return urllib.quote(a)
- cmd = ' '.join(map(maybe_quote,cmd))
+ cmd = ' '.join(map(maybe_quote, cmd))
logging.debug('adt-virt >> %s', cmd)
print >>self._vs.stdin, cmd
return cmd.split(' ')[0]
@@ -1653,8 +1653,8 @@ class VirtServ(Chroot):
def _getfilecontents(self, filename):
try:
- (_,tf) = create_temp_file()
- self._command(['copyup',(filename,),(tf,)])
+ (_, tf) = create_temp_file()
+ self._command(['copyup', (filename,), (tf,)])
f = file(tf)
d = f.read()
f.close()
@@ -1682,7 +1682,7 @@ class VirtServ(Chroot):
self._command('close')
dont_do_on_panic(self.panic_handler_id)
- def _fail(self,m):
+ def _fail(self, m):
logging.error("adt-virt-* error: "+m)
panic()
@@ -1707,19 +1707,19 @@ class VirtServ(Chroot):
def _execute(self, cmdl, tolerate_errors=False):
assert type(cmdl) == type([])
- prefix = ['sh','-ec','''
+ prefix = ['sh', '-ec', '''
LC_ALL=C
unset LANGUAGES
export LC_ALL
exec 2>&1
exec "$@"
- ''','<command>']
+ ''', '<command>']
ca = ','.join(map(urllib.quote, prefix + cmdl))
stdout = '%s/cmd-stdout' % self._scratch
stderr = '%s/cmd-stderr-base' % self._scratch
- cmd = ['execute',ca,
- '/dev/null',(stdout,),(stderr,),
- '/root','timeout=600']
+ cmd = ['execute', ca,
+ '/dev/null', (stdout,), (stderr,),
+ '/root', 'timeout=600']
es = int(self._command(cmd)[0])
if es and not tolerate_errors:
stderr_data = self._getfilecontents(stderr)
@@ -1729,25 +1729,25 @@ class VirtServ(Chroot):
return (es, stdout, stderr)
def _execute_getoutput(self, cmdl):
- (es,stdout,stderr) = self._execute(cmdl)
+ (es, stdout, stderr) = self._execute(cmdl)
stderr_data = self._getfilecontents(stderr)
if es or stderr_data:
logging.error('Internal command failed (status=%d): %s\n%s' %
(es, `cmdl`, indent_string(stderr_data)))
panic()
- (_,tf) = create_temp_file()
+ (_, tf) = create_temp_file()
try:
- self._command(['copyup',(stdout,),(tf,)])
+ self._command(['copyup', (stdout,), (tf,)])
except:
os.remove(tf)
raise
return tf
def run(self, command, ignore_errors=False):
- cmdl = ['sh','-ec','cd /\n' + ' '.join(command)]
- (es,stdout,stderr) = self._execute(cmdl, tolerate_errors=True)
+ cmdl = ['sh', '-ec', 'cd /\n' + ' '.join(command)]
+ (es, stdout, stderr) = self._execute(cmdl, tolerate_errors=True)
stdout_data = self._getfilecontents(stdout)
- print >>sys.stderr, "VirtServ run", `command`,`cmdl`, '==>', `es`,`stdout`,`stderr`, '|', stdout_data
+ print >>sys.stderr, "VirtServ run", `command`, `cmdl`, '==>', `es`, `stdout`, `stderr`, '|', stdout_data
if es == 0 or ignore_errors: return (es, stdout_data)
stderr_data = self._getfilecontents(stderr)
logging.error('Command failed (status=%d): %s\n%s' %
@@ -1768,18 +1768,18 @@ class VirtServ(Chroot):
def chmod(self, path, mode):
self._execute(['chmod', ('0%o' % mode), self._tbpath(path)])
def remove_files(self, paths):
- self._execute(['rm','--'] + map(self._tbpath, paths))
+ self._execute(['rm', '--'] + map(self._tbpath, paths))
def copy_file(self, our_src, tb_dest):
- self._command(['copydown',(our_src,),
+ self._command(['copydown', (our_src,),
(self._tbpath(tb_dest)+'/'+os.path.basename(our_src),)])
def create_file(self, path, data):
path = self._tbpath(path)
try:
- (_,tf) = create_temp_file()
- f = file(tf,'w')
+ (_, tf) = create_temp_file()
+ f = file(tf, 'w')
f.write(tf)
f.close()
- self._command(['copydown',(tf,),(path,)])
+ self._command(['copydown', (tf,), (path,)])
finally:
os.remove(tf)
@@ -1798,8 +1798,8 @@ class VirtServ(Chroot):
vdict = {}
- tf = self._execute_getoutput(['find','/','-xdev','-printf',
- "%y %m %U %G %s %p %l \\n".replace(' ','\\0')])
+ tf = self._execute_getoutput(['find', '/', '-xdev', '-printf',
+ "%y %m %U %G %s %p %l \\n".replace(' ', '\\0')])
try:
f = file(tf)
@@ -1820,7 +1820,7 @@ class VirtServ(Chroot):
if not line: break
st = VirtServ.DummyStat()
- st.st_mode = mode_map[splut[0]] | int(splut[1],8)
+ st.st_mode = mode_map[splut[0]] | int(splut[1], 8)
(st.st_uid, st.st_gid, st.st_size) = map(int, splut[2:5])
vdict[splut[5]] = (st, splut[6])
@@ -1832,7 +1832,7 @@ class VirtServ(Chroot):
return vdict
def get_files_owned_by_packages(self):
- tf = self._execute_getoutput(['bash','-ec','''
+ tf = self._execute_getoutput(['bash', '-ec', '''
cd /var/lib/dpkg/info
find . -name "*.list" -type f -print0 | \\
xargs -r0 egrep . /dev/null
@@ -1842,7 +1842,7 @@ class VirtServ(Chroot):
try:
f = file(tf)
for l in f:
- (lf,pathname) = l.rstrip('\n').split(':',1)
+ (lf, pathname) = l.rstrip('\n').split(':', 1)
assert lf.endswith('.list')
pkg = lf[:-5]
if pathname in vdict:
@@ -1858,7 +1858,7 @@ class VirtServ(Chroot):
def check_for_broken_symlinks(self):
if not settings.check_broken_symlinks:
return
- tf = self._execute_getoutput(['bash','-ec','''
+ tf = self._execute_getoutput(['bash', '-ec', '''
find / -xdev -type l -print0 | \\
xargs -r0 -i'{}' \\
find '{}' -maxdepth 0 -follow -type l -ls
@@ -2771,7 +2771,7 @@ def parse_command_line():
default=False,
help="Fail if broken symlinks are detected.")
- parser.add_option("--log-level", action="store",metavar='LEVEL',
+ parser.add_option("--log-level", action="store", metavar='LEVEL',
default="dump",
help="Displays messages from LEVEL level, possible values are: error, info, dump, debug. The default is dump.")
diff --git a/piupartslib/packagesdb.py b/piupartslib/packagesdb.py
index 7876f73..8d3ac48 100644
--- a/piupartslib/packagesdb.py
+++ b/piupartslib/packagesdb.py
@@ -105,7 +105,7 @@ class Package(UserDict.UserDict):
vlist += self._parse_alternative_dependencies(header)
return vlist
- def prefer_alt_depends(self, header_name,dep_idx,dep):
+ def prefer_alt_depends(self, header_name, dep_idx, dep):
if header_name in self:
if header_name not in self._parsed_deps:
self._parse_dependencies(header_name)
@@ -127,7 +127,7 @@ class Package(UserDict.UserDict):
"""Get the recursive dependency count, if it has been calculated"""
if self._rrdep_count == None:
raise Exception('Reverse dependency count has not been calculated')
- return(self._rrdep_count)
+ return self._rrdep_count
def set_rrdep_count(self, val):
self._rrdep_count = val
@@ -136,7 +136,7 @@ class Package(UserDict.UserDict):
"""Get the number of packages blocked by this package"""
if self._block_count == None:
raise Exception('Block count has not been calculated')
- return(self._block_count)
+ return self._block_count
def set_block_count(self, val):
self._block_count = val
@@ -145,7 +145,7 @@ class Package(UserDict.UserDict):
"""Get the number of packages waiting for this package"""
if self._waiting_count == None:
raise Exception('Waiting count has not been calculated')
- return(self._waiting_count)
+ return self._waiting_count
def set_waiting_count(self, val):
self._waiting_count = val
@@ -765,11 +765,11 @@ class PackagesDB:
# use the Packages dependencies() method for a conservative count
for dep in self._packages[pkg_name].dependencies():
if dep in rdeps:
- rdeps[dep].append( pkg_name )
+ rdeps[dep].append(pkg_name)
else:
rdeps[dep] = [pkg_name]
- def recurse_rdeps( pkg_name, rdeps, rrdep_dict ):
+ def recurse_rdeps(pkg_name, rdeps, rrdep_dict):
""" Recurse through the reverse dep arrays to determine the recursive
dependency count for a package. rrdep_dict.keys() contains the
accumulation of rdeps encountered"""
@@ -780,21 +780,21 @@ class PackagesDB:
# break circular dependency loops
if not rdep in rrdep_dict:
rrdep_dict[rdep] = 1
- rrdep_dict = recurse_rdeps( rdep, rdeps, rrdep_dict )
+ rrdep_dict = recurse_rdeps(rdep, rdeps, rrdep_dict)
return rrdep_dict
# calculate all of the rrdeps and block counts
for pkg_name in self._packages.keys():
- rrdep_list = recurse_rdeps( pkg_name, rdeps, {} ).keys()
- self._packages[pkg_name].set_rrdep_count( len(rrdep_list) )
+ rrdep_list = recurse_rdeps(pkg_name, rdeps, {}).keys()
+ self._packages[pkg_name].set_rrdep_count(len(rrdep_list))
if self._package_state[pkg_name] in error_states:
block_list = [x for x in rrdep_list
if self._package_state[x] in error_states]
else:
block_list = []
- self._packages[pkg_name].set_block_count( len(block_list) )
+ self._packages[pkg_name].set_block_count(len(block_list))
if self._package_state[pkg_name] in waiting_states:
waiting_list = [x for x in rrdep_list
--
piuparts git repository
More information about the Piuparts-commits
mailing list