[med-svn] r1237 - in trunk/community: . qa qa/DebianQA qa/Parse qa/oldscripts qa/templates
hanska-guest at alioth.debian.org
Thu Jan 31 18:15:22 UTC 2008
Author: hanska-guest
Date: 2008-01-31 18:15:21 +0000 (Thu, 31 Jan 2008)
New Revision: 1237
Added:
trunk/community/qa/
trunk/community/qa/DebianQA/
trunk/community/qa/DebianQA/Archive.pm
trunk/community/qa/DebianQA/BTS.pm
trunk/community/qa/DebianQA/Cache.pm
trunk/community/qa/DebianQA/Classification.pm
trunk/community/qa/DebianQA/Common.pm
trunk/community/qa/DebianQA/Config.pm
trunk/community/qa/DebianQA/DebVersions.pm
trunk/community/qa/DebianQA/Svn.pm
trunk/community/qa/DebianQA/Watch.pm
trunk/community/qa/Parse/
trunk/community/qa/Parse/DebControl.pm
trunk/community/qa/README
trunk/community/qa/commoncheck
trunk/community/qa/debian-med.conf
trunk/community/qa/debianqa.conf-sample
trunk/community/qa/fetchdata
trunk/community/qa/htaccess
trunk/community/qa/maintainercheck
trunk/community/qa/oldscripts/
trunk/community/qa/oldscripts/Common.pm
trunk/community/qa/oldscripts/versioncheck
trunk/community/qa/oldscripts/versioncheck-html
trunk/community/qa/oldscripts/versioncheck.pl
trunk/community/qa/oldscripts/versioncheck2.pl
trunk/community/qa/oldscripts/versioncheck3.pl
trunk/community/qa/packagecheck
trunk/community/qa/qareport
trunk/community/qa/qareport-chlog.cgi
trunk/community/qa/qareport.cgi
trunk/community/qa/svncruftcheck
trunk/community/qa/templates/
trunk/community/qa/templates/by_category
trunk/community/qa/wnppcheck
Log:
Adding DebianQA scripts from Pkg-Perl team :)
Added: trunk/community/qa/DebianQA/Archive.pm
===================================================================
--- trunk/community/qa/DebianQA/Archive.pm (rev 0)
+++ trunk/community/qa/DebianQA/Archive.pm 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,228 @@
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: Archive.pm 13820 2008-01-29 06:11:41Z tincho-guest $
+#
+# Module for retrieving data from the Debian archive. It reads Sources.gz
+# files and also downloads package lists from the NEW and INCOMING queues.
+#
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+
+package DebianQA::Archive;
+use strict;
+use warnings;
+
+our @ISA = "Exporter";
+our @EXPORT = qw(deb_download deb_get deb_get_consolidated);
+
+use DebianQA::Cache;
+use DebianQA::Common;
+use DebianQA::Config '%CFG';
+use DebianQA::Svn;
+use DebianQA::DebVersions;
+use Fcntl qw(:seek);
+use LWP::UserAgent;
+#use IO::Uncompress::Gunzip; # Only in lenny
+use Compress::Zlib ();
+use HTML::TableExtract;
+
+my $ua = new LWP::UserAgent;
+$ua->timeout(10);
+$ua->env_proxy;
+
+# Downloads source package listings from the Debian archive.
+# * If $force, the current cache is ignored.
+#
+# Re-generates the cache of consolidated versions (key "archive"),
+# which is keyed on package name and contains:
+# {
+# most_recent => $most_recent_version,
+# testing => $version_in_testing,
+# ....
+# }
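+#
+# Illustrative usage sketch (the package name "libfoo-perl" is hypothetical,
+# and it assumes deb_download() has already been able to fill the cache):
+#
+#   deb_download(0);                     # refresh only the stale suites
+#   my $info = deb_get_consolidated("libfoo-perl");
+#   print "$info->{most_recent} (from $info->{most_recent_src})\n"
+#       if($info->{most_recent});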
+sub deb_download {
+ my $force = shift;
+ my @list = split(/\s*,\s*/, $CFG{archive}{suites});
+ my @ttls = split(/\s*,\s*/, $CFG{archive}{suites_ttl});
+ my %ttl = map({ $list[$_] => $ttls[$_] } (0..$#list));
+
+ if($CFG{archive}{new_url}) {
+ push @list, "new";
+ $ttl{new} = $CFG{archive}{new_ttl} || 60;
+ }
+ if($CFG{archive}{incoming_url}) {
+ push @list, "incoming";
+ $ttl{incoming} = $CFG{archive}{incoming_ttl} || 60;
+ }
+ my $data = {};
+ unless($force) {
+ $data = read_cache("archive", "", 0);
+ }
+ my $modified;
+ foreach my $src (@list) {
+        # find_stamp is deliberately called on the sub-hash, so that each key acts as a root
+ if($force or ! $data->{$src}
+ or $ttl{$src} * 60 < time - find_stamp($data->{$src}, "")) {
+ info("$src is stale, getting new version") unless($force);
+ my $d;
+ if($src eq "new") {
+ $d = get_new();
+ } elsif($src eq "incoming") {
+ $d = get_incoming();
+ } else {
+ $d = get_sources($src);
+ }
+ if($d) {
+ update_cache("archive", $d, $src, 1, 0);
+ $modified = 1;
+ }
+ }
+ }
+ return unless($modified);
+ info("Re-generating consolidated hash");
+ my $pkgs = get_pkglist_hashref();
+ # retain lock, we need consistency
+ $data = read_cache("archive", "", 1);
+ my $g = {};
+ foreach my $suite (keys(%$data)) {
+ next unless($ttl{$suite});
+ foreach my $pkg (keys(%{$data->{$suite}})) {
+ next if($pkg =~ m#^/#);
+ next if(%$pkgs and not $pkgs->{$pkg});
+ $g->{$pkg}{$suite} = $data->{$suite}{$pkg};
+ }
+ }
+ # Hash for comparing equivalent versions in different suites
+ my %src_compare = (
+ oldstable => 1, # not 0, so no need to test defined()
+ sarge => 1,
+ stable => 2,
+ etch => 2,
+ testing => 3,
+ lenny => 3,
+ experimental => 4,
+ incoming => 5,
+ new => 6,
+ unstable => 7,
+ sid => 8,
+ other => 9
+ );
+ foreach my $pkg (keys(%$g)) {
+ my @recent = sort( {
+ deb_compare_nofail($g->{$pkg}{$a}, $g->{$pkg}{$b}) or
+ ($src_compare{$a} || $src_compare{other}) <=>
+ ($src_compare{$b} || $src_compare{other})
+ } keys(%{$g->{$pkg}}));
+ $g->{$pkg}{most_recent} = $g->{$pkg}{$recent[-1]};
+ $g->{$pkg}{most_recent_src} = $recent[-1];
+ }
+ $data = update_cache("consolidated", $g, "archive", 1, 0);
+ unlock_cache("archive");
+ return;
+}
+# Returns the consolidated hash of versions. Doesn't download anything.
+sub deb_get_consolidated {
+ my $path = shift || "";
+ return read_cache("consolidated", "archive/$path", 0);
+}
+# Returns the hash of versions. Doesn't download anything.
+sub deb_get {
+ return read_cache("archive", shift, 0);
+}
+sub get_sources {
+ my($suite) = shift;
+ my @sections = split(/\s*,\s*/, $CFG{archive}{sections});
+ my %vers;
+ foreach my $section(@sections) {
+ my $url = $CFG{archive}{mirror} . "/dists/$suite/$section/source/Sources.gz";
+ info("Downloading $url");
+ open(TMP, "+>", undef) or die $!;
+ my $res = $ua->get($url, ":content_cb" => sub {
+ print TMP $_[0] or die $!;
+ });
+ unless($res->is_success()) {
+ warn "Can't download $url: " . $res->message();
+ return 0;
+ }
+ seek(TMP, 0, SEEK_SET) or die "Can't seek: $!\n";
+ my $gz = Compress::Zlib::gzopen(\*TMP, "rb")
+ or die "Can't open compressed file: $!\n";
+
+ my $data;
+ open($data, "+>", undef) or die $!;
+ my $buffer = " " x 4096;
+ my $bytes;
+ while(($bytes = $gz->gzread($buffer)) > 0) {
+ print $data $buffer;
+ }
+ die $gz->gzerror if($bytes < 0);
+ close TMP;
+ #my $z = new IO::Uncompress::Gunzip(\$data);
+
+ seek($data, 0, SEEK_SET) or die "Can't seek: $!\n";
+ # Blank line as "line" separator, so a "line" is a full record
+ local $/ = "";
+ while(<$data>) {
+ s/\n\s+//gm;
+ /^package:\s*(\S+)\s*$/mi or next;
+ my $pkg = $1;
+ /^version:\s*(\S+)\s*$/mi or next;
+ $vers{$pkg} = $1;
+ }
+ close $data;
+ }
+ return \%vers;
+}
+sub get_incoming {
+ my $url = $CFG{archive}{incoming_url};
+ info("Downloading $url");
+ my $res = $ua->get($url);
+ unless($res->is_success()) {
+ warn "Can't download $url: " . $res->message();
+ return 0;
+ }
+ my $data = $res->decoded_content();
+ my %vers;
+ while($data =~ /<a href="([^_]+)_(.+)\.dsc">/g) {
+ debug("existing $1: $vers{$1} / $2") if(defined($vers{$1}));
+ if(!defined $vers{$1} or deb_compare($2, $vers{$1}) > 0) {
+ debug("replaced $1: $vers{$1} -> $2") if(defined($vers{$1}));
+ $vers{$1} = $2;
+ }
+ }
+ return \%vers;
+}
+sub get_new {
+ my $url = $CFG{archive}{new_url};
+ info("Downloading $url");
+ my $res = $ua->get($url);
+ unless($res->is_success()) {
+ warn "Can't download $url: " . $res->message();
+ return 0;
+ }
+ my $data = $res->decoded_content();
+ my $te = new HTML::TableExtract( headers => [ qw(
+ Package Version Arch Distribution Age Maintainer Closes
+ ) ]);
+ $te->parse($data);
+ my %vers;
+ foreach my $table ($te->tables) {
+ foreach my $row ($table->rows) {
+ next unless $row->[2] =~ /source/;
+ my $pkg = $row->[0];
+ foreach(split(/\s+/, $row->[1])) {
+ next unless($_);
+ debug("existing $pkg: $vers{$pkg} / $_") if(
+ defined($vers{$pkg}));
+ if(!defined $vers{$pkg} or deb_compare($_, $vers{$pkg}) > 0) {
+ debug("replaced $pkg: $vers{$pkg} -> $_") if(
+ defined($vers{$pkg}));
+ $vers{$pkg} = $_;
+ }
+ }
+ }
+ }
+ return \%vers;
+}
+1;
Added: trunk/community/qa/DebianQA/BTS.pm
===================================================================
--- trunk/community/qa/DebianQA/BTS.pm (rev 0)
+++ trunk/community/qa/DebianQA/BTS.pm 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,154 @@
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: BTS.pm 12750 2008-01-14 20:54:11Z tincho-guest $
+#
+# Module for retrieving bugs from the BTS, using the SOAP interface
+#
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+
+package DebianQA::BTS;
+use strict;
+use warnings;
+
+our @ISA = "Exporter";
+our @EXPORT = qw(bts_download bts_get bts_get_consolidated);
+
+use DebianQA::Common;
+use DebianQA::Config '%CFG';
+use DebianQA::Cache;
+use DebianQA::Svn;
+use SOAP::Lite;
+
+#my $maint = 'pkg-perl-maintainers at lists.alioth.debian.org';
+
+sub bts_download {
+ my($force, @pkglist) = @_;
+ $force ||= 0;
+ debug("bts_download($force, (@pkglist))");
+
+ my @list;
+ my $cdata = {};
+ my $replace = 0;
+
+ my $soap = SOAP::Lite->uri($CFG{bts}{soap_uri})->proxy(
+ $CFG{bts}{soap_proxy});
+ unless($force) {
+ $cdata = read_cache("bts", "", 0);
+ }
+ my $pkginfo = get_pkglist_hashref();
+ if(@pkglist) {
+ # A list of packages to update has been received
+ unless($force) {
+ @pkglist = grep( {
+ $CFG{bts}{ttl} * 60 < time - find_stamp($cdata, $_)
+ } @pkglist);
+ return $cdata unless(@pkglist); # Cache is up-to-date
+ info("BTS info for @pkglist is stale") if(@pkglist);
+ }
+ info("Downloading list of bugs of (", join(", ", @pkglist),
+ ")");
+ @list = @{$soap->get_bugs( src => [ @pkglist ] )->result()};
+ } elsif($force or $CFG{bts}{ttl} * 60 < time - find_stamp($cdata, "")) {
+ # No list of packages; forced operation or stale cache
+ info("BTS info is stale") unless($force);
+ $replace = 1;
+ @pkglist = keys %$pkginfo;
+        # TODO: could verify that pkglist and the packages maintained by
+        # $maint are the same
+ if(@pkglist) {
+ info("Downloading list of bugs of packages in the repo");
+ @list = @{$soap->get_bugs( src => [ @pkglist ] )->result()};
+ } else {
+ # Doesn't make sense to search bugs if we don't have the list
+ # of packages.
+ return {};
+# info("Downloading list of bugs assigned to $maint");
+# @list = @{$soap->get_bugs( maint => $maint )->result()};
+ }
+ } else {
+ # Cache is up to date
+ return $cdata;
+ }
+ my $bugs_st = {};
+ if(@list) {
+ info("Downloading status for ", scalar @list, " bugs");
+ $bugs_st = $soap->get_status(@list)->result();
+ }
+
+ my %binmap;
+ foreach my $src (keys %$pkginfo) {
+ $binmap{$_} = $src foreach(@{$pkginfo->{$src}{binaries} || []});
+ }
+ my %bugs = ();
+ foreach my $bug (keys %$bugs_st) {
+ # Until #458822 is solved, we need to use our own bin -> src mapping
+ my $binname = $bugs_st->{$bug}->{package};
+ # There could be more than one package!
+ $binname =~ s/\s+//g;
+ my @binnames = split(/,/, $binname);
+ my $found = 0;
+ foreach(@binnames) {
+ my $srcname = exists $pkginfo->{$_} ? $_ : $binmap{$_} or next;
+ $bugs{$srcname}{$bug} = $bugs_st->{$bug};
+ $found++;
+ }
+ unless($found) {
+ warn("Can't find source package for $binname in bug #$bug");
+ next;
+ }
+ }
+ # retain lock, we need consistency
+ $cdata = update_cache("bts", \%bugs, "", $replace, 1);
+
+ info("Re-generating consolidated hash");
+ @pkglist = keys %$pkginfo;
+
+ # TODO: Interesting fields:
+ # keywords/tags, severity, subject, forwarded, date
+ my %cbugs;
+ foreach my $pkgname (@pkglist) {
+ $bugs{$pkgname} ||= {};
+
+ # bugs to ignore if keyword present
+ my %ign_keywords = map({ $_ => 1 }
+ split(/\s*,\s*/, $CFG{bts}{ignore_keywords}));
+ # bugs to ignore if of specified severities
+ my %ign_severities = map({ $_ => 1 }
+ split(/\s*,\s*/, $CFG{bts}{ignore_severities}));
+
+ $cbugs{$pkgname} = {};
+ foreach my $bug (keys %{ $bugs{$pkgname} }) {
+ next unless(ref $bugs{$pkgname}{$bug});
+ # Remove done bugs
+ next if($bugs{$pkgname}{$bug}{done});
+ # Remove if severity match
+ next if($ign_severities{$bugs{$pkgname}{$bug}{severity}});
+ # Remove if keyword match
+ my @keywords = split(/\s+/, $bugs{$pkgname}{$bug}{keywords});
+ next if(grep({ $ign_keywords{$_} } @keywords));
+ $cbugs{$pkgname}{$bug} = {
+ keywords => $bugs{$pkgname}{$bug}{keywords},
+ # need to use a new key for compatibility
+ keywordsA => \@keywords,
+ severity => $bugs{$pkgname}{$bug}{severity},
+ subject => $bugs{$pkgname}{$bug}{subject},
+ forwarded=> $bugs{$pkgname}{$bug}{forwarded},
+ };
+ }
+ }
+ update_cache("consolidated", \%cbugs, "bts", 1, 0);
+ unlock_cache("bts");
+ return $cdata;
+}
+# Returns the hash of bugs. Doesn't download anything.
+sub bts_get {
+ return read_cache("bts", shift, 0);
+}
+# Returns the consolidated hash of bugs. Doesn't download anything.
+sub bts_get_consolidated {
+ my $path = shift || "";
+ return read_cache("consolidated", "bts/$path", 0);
+}
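+# Illustrative usage sketch (the source package name "libfoo-perl" is
+# hypothetical); the consolidated hash is keyed by bug number:
+#
+#   bts_download(0);                     # refresh the cache if it is stale
+#   my $bugs = bts_get_consolidated("libfoo-perl");
+#   printf("#%s [%s] %s\n", $_, $bugs->{$_}{severity}, $bugs->{$_}{subject})
+#       foreach(sort keys %$bugs);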
+1;
Added: trunk/community/qa/DebianQA/Cache.pm
===================================================================
--- trunk/community/qa/DebianQA/Cache.pm (rev 0)
+++ trunk/community/qa/DebianQA/Cache.pm 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,226 @@
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: Cache.pm 12764 2008-01-15 12:07:48Z tincho-guest $
+#
+# Routines for handling cache files
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+
+package DebianQA::Cache;
+use strict;
+use warnings;
+
+our @ISA = "Exporter";
+our @EXPORT = (qw(
+ dump_cache unlock_cache read_cache update_cache find_stamp ));
+
+use DebianQA::Config '%CFG';
+use DebianQA::Common;
+use Storable qw(store_fd fd_retrieve);
+use Fcntl qw(:seek :flock);
+use File::Path;
+
+my %fd; # Hash of open FDs, to keep locks.
+my %memcache; # Memory cache for repeated requests
+
+sub dump_cache($;$) {
+ my($cache, $root) = @_;
+ $root ||= "";
+ $root =~ s{/+$}{};
+
+ if(! defined($fd{$cache})) {
+ mkpath $CFG{common}{cache_dir};
+ open $fd{$cache}, "<", "$CFG{common}{cache_dir}/$cache"
+ or die "Error opening cache: $!\n";
+ flock($fd{$cache}, LOCK_SH) or die "Error locking cache: $!\n";
+ }
+ my $fd = $fd{$cache};
+ seek($fd, 0, SEEK_SET) or die "Can't seek: $!\n";
+ my $data = {};
+ if(-s $fd) {
+ $data = fd_retrieve($fd) or die "Can't read cache: $!\n";
+ }
+ unlock_cache($cache);
+ require Data::Dumper;
+ print Data::Dumper::Dumper(dive_hash($data, $root));
+ 1;
+}
+# Releases any pending lock on a cache and closes the file.
+sub unlock_cache($) {
+ my $cache = shift;
+ return 0 unless($fd{$cache});
+ debug("Closing $CFG{common}{cache_dir}/$cache");
+ close($fd{$cache});
+ $fd{$cache} = undef;
+ 1;
+}
+sub read_cache($;$$) {
+ # * $root specifies a path inside the cache hash.
+ # * If $keep_lock, file is kept open and write-locked until the next
+ # operation.
+ #
+ # In scalar context returns the data as a hashref. In array context also
+ # returns the effective stamp as a second element. The effective
+ # stamp is the value of a "/stamp" key at the same level (or up) as
+ # $root. If there are single elements with newer stamps, they will have
+ # a "/stamp" subkey.
+ my($cache, $root, $keep_lock) = @_;
+ $root ||= "";
+ $keep_lock ||= 0;
+ debug("read_cache($cache, $root, $keep_lock) invoked");
+
+ $root = "/$root";
+ $root =~ s{/+$}{};
+
+ my $file = "$CFG{common}{cache_dir}/$cache";
+ unless(-e $file) {
+ return({}, 0) if(wantarray);
+ return {};
+ }
+ my $use_memcache = 0;
+ if(! defined($fd{$cache})) {
+ mkpath $CFG{common}{cache_dir};
+ if($keep_lock) {
+ debug("Opening $file in RW mode");
+ open $fd{$cache}, "+<", $file or die "Error opening cache: $!\n";
+ flock($fd{$cache}, LOCK_EX) or die "Error locking cache: $!\n";
+ } else {
+ if($memcache{$cache} and $memcache{$cache}{mtime} == -M $file) {
+ $use_memcache = 1;
+ } else {
+ debug("Opening $file in R mode");
+ open $fd{$cache}, "<", $file or die "Error opening cache: $!\n";
+ flock($fd{$cache}, LOCK_SH) or die "Error locking cache: $!\n";
+ }
+ }
+ }
+ my $data = {};
+ if($use_memcache) {
+ $data = $memcache{$cache}{data};
+ } else {
+ my $fd = $fd{$cache};
+ seek($fd, 0, SEEK_SET) or die "Can't seek: $!\n";
+ if(-s $fd) {
+ $data = fd_retrieve($fd) or die "Can't read cache: $!\n";
+ }
+ unlock_cache($cache) unless($keep_lock);
+ $memcache{$cache} = {
+ data => $data,
+ mtime => -M $file
+ };
+ }
+ my $rootd = dive_hash($data, $root);
+ return $rootd if(not wantarray);
+ return($rootd, find_stamp($data, $root));
+}
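+# A minimal sketch of how read_cache() is typically called (the cache name
+# "svn" and the path "some-dir" are examples only):
+#
+#   my $dir_data = read_cache("svn", "some-dir", 0);    # scalar context
+#   my($all, $stamp) = read_cache("svn", "", 0);        # list context
+#   info("cache is ", time - $stamp, " seconds old") if($stamp);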
+sub update_cache($$;$$$$) {
+ # * $root specifies a path inside the cache hash.
+ # * $data is the data to merge/replace (depending on $replace) in the cache
+    #   starting from $root. Note that it is merged only at the first level, so
+    #   existing data inside an updated key won't be kept.
+ # * If $keep_lock, file is kept open and write-locked until the next
+ # operation.
+ #
+ # A $stamp is added with key "/stamp", at the $root level if $replace,
+ # inside each key if not. If no $stamp is specified, the current unix time
+ # is used.
+ #
+ # Returns the whole cache
+ my($cache, $data, $root, $replace, $keep_lock, $stamp) = @_;
+ $root ||= "";
+ $root = "/$root";
+ $root =~ s{/+$}{};
+ $replace ||= 0;
+ $keep_lock ||= 0;
+ $stamp = time unless(defined $stamp);
+ debug("update_cache($cache, $data, $root, $replace, $keep_lock, $stamp) ",
+ "invoked");
+
+ my $file = "$CFG{common}{cache_dir}/$cache";
+ if(! defined($fd{$cache})) {
+ debug("Opening $file in RW mode");
+ if(-e $file) {
+ open($fd{$cache}, "+<", $file) or die "Error opening cache: $!\n";
+ } else {
+ mkpath $CFG{common}{cache_dir};
+ open($fd{$cache}, "+>", $file) or die "Error opening cache: $!\n";
+ }
+ flock($fd{$cache}, LOCK_EX) or die "Error locking cache: $!\n";
+ }
+ my $fd = $fd{$cache};
+ seek($fd, 0, SEEK_SET) or die "Can't seek: $!\n";
+ my $cdata = {};
+ if(-s $fd) {
+ $cdata = fd_retrieve($fd) or die "Can't read cache: $!\n";
+ }
+ if($replace) {
+ if($root =~ m{^/*$}) {
+ $root = $cdata = $data;
+ } else {
+ $root =~ s{/+([^/]+)$}{};
+ my $leaf = $1;
+ $root = dive_hash($cdata, $root);
+ $root = ($root->{$leaf} = $data);
+ }
+ $root->{"/stamp"} = $stamp;
+ $root->{"/version"} = $VERSION;
+ } else {
+ $root = dive_hash($cdata, $root);
+ foreach(keys(%$data)) {
+ $root->{$_} = $data->{$_};
+ $root->{$_}{"/stamp"} = $stamp;
+ }
+ }
+ seek($fd, 0, SEEK_SET) or die "Can't seek: $!\n";
+ store_fd($cdata, $fd) or die "Can't save cache: $!\n";
+ unless($keep_lock) {
+ unlock_cache($cache);
+ $memcache{$cache} = {
+ data => $cdata,
+ mtime => -M $file
+ };
+ }
+ return $cdata;
+}
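+# A minimal sketch of the merge/replace semantics (the cache name "svn" and
+# the keys/values below are examples only):
+#
+#   # Merge at the first level: a "/stamp" is added inside each updated key
+#   update_cache("svn", { "dir-a" => { version => "1.0-1" } }, "", 0, 0);
+#   # Replace everything under "dir-a"; the "/stamp" goes at that level
+#   update_cache("svn", { version => "1.0-2" }, "dir-a", 1, 0);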
+# Return a reference into $hash, as specified with $path
+# Creates or replaces any component that is not a hashref
+sub dive_hash($;$) {
+ my($hash, $path) = @_;
+ $path ||= "";
+ debug("dive_hash($hash, $path) invoked");
+ die "Invalid hashref" unless($hash and ref $hash and ref $hash eq "HASH");
+ my @path = split(m#/+#, $path);
+ my $ref = $hash;
+ foreach(@path) {
+ next unless($_);
+ my $r = $ref->{$_};
+ unless($r and ref $r and ref $r eq "HASH") {
+ $r = $ref->{$_} = {};
+ }
+ $ref = $r;
+ }
+ return $ref;
+}
+# Search a stamp in $hash, starting at $path and going upwards until the
+# root. Returns 0 if not found.
+# Remember to call it with the root of the cache, to have proper stamp and
+# version handling.
+sub find_stamp {
+ my($hash, $path) = @_;
+ $path ||= "";
+ debug("find_stamp($hash, $path) invoked");
+ die "Invalid hashref" unless($hash and ref $hash and ref $hash eq "HASH");
+ if(! $hash->{"/version"} or $hash->{"/version"} < $VERSION) {
+ return 0;
+ }
+ my $ctsmp = 0;
+ if($path =~ s{^/*([^/]+)}{}) {
+ my $root = $1;
+ $ctsmp = find_stamp($hash->{$root}, $path) if($hash->{$root});
+ }
+ if(not $ctsmp and exists($hash->{"/stamp"})) {
+ $ctsmp = $hash->{"/stamp"};
+ }
+ return $ctsmp || 0;
+}
+1;
Added: trunk/community/qa/DebianQA/Classification.pm
===================================================================
--- trunk/community/qa/DebianQA/Classification.pm (rev 0)
+++ trunk/community/qa/DebianQA/Classification.pm 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,142 @@
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: Classification.pm 12348 2008-01-09 22:42:14Z tincho-guest $
+#
+# Module for classifying packages into problem classes. The idea is to keep the
+# reporting scripts absolutely minimal and to share common code between
+# different report implementations.
+#
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+
+package DebianQA::Classification;
+use strict;
+use warnings;
+
+our @ISA = "Exporter";
+our @EXPORT = qw(classify);
+
+use DebianQA::Cache;
+#use DebianQA::Common;
+#use DebianQA::Config '%CFG';
+use DebianQA::DebVersions;
+
+# Takes a list of packages to process.
+# Returns a single hash, ready to use in reporting, keyed by package name.
+# package_name => {
+# status => { # Hash to ease lookup, empty if OK (@notes)
+# needs_upload => 1,
+# needs_upgrade => 1,
+# invalid_svn_version => 1,
+# ...
+# },
+# notes => [ ... ],
+# hilight => { # Problems indexed by highlighted item
+# archive => { needs_upload => 1, ... },
+# bts => { has_bugs => 1 }, ...
+# },
+# svn_path => "...",
+# upstream_url => "...", # Already extracted data for ease of use
+#
+# bts => {},
+# archive => {},
+# svn => {},
+# watch => {} # Copies from the caches
+# }
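+#
+# A minimal usage sketch (the package name "libfoo-perl" is hypothetical and
+# assumes the consolidated cache has already been filled by the other modules):
+#
+#   my $cls = classify("libfoo-perl");
+#   print join(", ", sort keys %{$cls->{"libfoo-perl"}{status}}), "\n";
+#   print "$_\n" foreach(@{$cls->{"libfoo-perl"}{notes}});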
+
+my %error_hilight = (
+ archive_waiting => "archive",
+ needs_upload => "archive",
+ never_uploaded => "archive",
+ has_bugs => "bts",
+ not_finished => "svn",
+ repo_ancient => "svn",
+ needs_upgrade => "upstream",
+ upstream_ancient => "upstream",
+ watch_error => "upstream",
+# native => "",
+);
+
+sub classify(@) {
+ my @pkglist = @_;
+ my $data = read_cache(consolidated => "");
+ my %res = ();
+
+ foreach my $pkg (@pkglist) {
+ next if($pkg =~ /^\//);
+ my(%status, @notes);
+ # SVN versus archive
+ my $archive_ver = $data->{archive}{$pkg}{most_recent};
+ my $svn_ver = $data->{svn}{$pkg}{version};
+ my $svn_unrel_ver = $data->{svn}{$pkg}{un_version};
+ if(not $svn_ver or not $archive_ver) {
+ if(not $svn_ver) {
+ $status{not_finished} = 1;
+ }
+ if(not $archive_ver) {
+ $status{never_uploaded} = 1;
+ }
+ } elsif(deb_compare($archive_ver, $svn_ver) > 0) {
+ $status{repo_ancient} = 1;
+ push @notes, "$archive_ver > $svn_ver";
+ } elsif(deb_compare($archive_ver, $svn_ver) != 0
+ and not $svn_unrel_ver) {
+ $status{needs_upload} = 1;
+ }
+ # SVN versus upstream
+ my $repo_mangled_ver = $data->{svn}{$pkg}{mangled_ver};
+ my $repo_unrel_mangled_ver = $data->{svn}{$pkg}{mangled_un_ver};
+ my $upstream_mangled_ver = $data->{watch}{$pkg}{upstream_mangled};
+ # watch_error from svn is not needed, as Watch.pm copies it
+ my $watch_error = $data->{watch}{$pkg}{error};
+ if($watch_error and $watch_error eq "Native") {
+ #$status{native} = 1;
+ } elsif($watch_error) {
+ $status{watch_error} = 1;
+ push @notes, "Watch problem: $watch_error";
+ } elsif((not $repo_mangled_ver and not $repo_unrel_mangled_ver)
+ or not $upstream_mangled_ver) {
+ $status{watch_error} = 1; # Should not happen
+ push @notes, "Unexpected watchfile problem";
+ } elsif($repo_mangled_ver) { # Will not check if UNRELEASED (?)
+ if(deb_compare($repo_mangled_ver, $upstream_mangled_ver) > 0) {
+ $status{upstream_ancient} = 1;
+ push @notes, "$repo_mangled_ver > $upstream_mangled_ver";
+ }
+ if(deb_compare($repo_mangled_ver, $upstream_mangled_ver) < 0) {
+ $status{needs_upgrade} = 1;
+ }
+ }
+ # Archive
+ my $archive_latest = $data->{archive}{$pkg}{most_recent_src} || "";
+ if($archive_latest =~ /new|incoming/) {
+ $status{archive_waiting} = 1;
+ }
+ if($data->{bts}{$pkg} and %{$data->{bts}{$pkg}}) {
+ $status{has_bugs} = 1;
+ }
+ my %hilight;
+ foreach(keys %status) {
+ die "Internal error: $_ is not a valid status" unless(
+ $error_hilight{$_});
+ $hilight{$error_hilight{$_}}{$_} = 1;
+ }
+ $res{$pkg} = {
+ watch => $data->{watch}{$pkg},
+ archive => $data->{archive}{$pkg},
+ svn => $data->{svn}{$pkg},
+ bts => $data->{bts}{$pkg},
+ #
+ svn_path => $data->{svn}{$pkg}{dir},
+ upstream_url => $data->{watch}{$pkg}{upstream_url},
+ #
+ status => \%status,
+ notes => \@notes,
+ hilight => \%hilight
+ };
+ }
+ return \%res;
+}
+
+1;
Added: trunk/community/qa/DebianQA/Common.pm
===================================================================
--- trunk/community/qa/DebianQA/Common.pm (rev 0)
+++ trunk/community/qa/DebianQA/Common.pm 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,58 @@
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: Common.pm 12770 2008-01-15 13:35:02Z tincho-guest $
+#
+# Common helper routines
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+
+package DebianQA::Common;
+use strict;
+use warnings;
+
+our @ISA = "Exporter";
+our @EXPORT = qw(debug info warn error $VERSION);
+#our $VERSION = join(".", q$Revision: 12770 $ =~ /(\d+)/g);
+
+# Cannot use this on alioth
+#use version;
+#our $VERSION = qv("1.000");
+
+# Bump this version in case of data file change
+our $VERSION = 1.002;
+
+use DebianQA::Config '%CFG';
+use POSIX;
+
+my $basename;
+
+sub print_msg {
+ my($level, @msg) = @_;
+ return if($level > $CFG{common}{verbose});
+ unless($basename) {
+ $basename = $0;
+ $basename =~ s{.*/+}{};
+ }
+ @msg = split(/\n+/, join("", @msg));
+ foreach(@msg) {
+ if($CFG{common}{formatted_log}) {
+ printf(STDERR "%s %s[%d]: %s\n",
+ strftime("%b %e %H:%M:%S", localtime), $basename, $$, $_);
+ } else {
+            printf(STDERR "%s\n", $_);
+ }
+ }
+}
+sub error {
+ print_msg(0, @_);
+}
+sub warn {
+ print_msg(1, @_);
+}
+sub info {
+ print_msg(2, @_);
+}
+sub debug {
+ print_msg(3, @_);
+}
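+# Usage sketch: messages are filtered by $CFG{common}{verbose}
+# (error => 0, warn => 1, info => 2, debug => 3), so with the default
+# verbosity of 1 only error() and warn() produce output:
+#
+#   info("refreshing cache");        # printed when verbose >= 2
+#   debug("raw response follows");   # printed when verbose >= 3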
+1;
Added: trunk/community/qa/DebianQA/Config.pm
===================================================================
--- trunk/community/qa/DebianQA/Config.pm (rev 0)
+++ trunk/community/qa/DebianQA/Config.pm 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,139 @@
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: Config.pm 12750 2008-01-14 20:54:11Z tincho-guest $
+#
+# Module that holds configuration variables. Also has subroutines for parsing
+# command line options and the configuration file.
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+package DebianQA::Config;
+
+use strict;
+use warnings;
+
+use FindBin;
+use Getopt::Long;
+
+our @EXPORT = qw(%CFG read_config getopt_common);
+our @ISA = "Exporter";
+
+# Default values
+my %defaults = (
+ qareport_cgi => {
+ templates_path => "templates",
+ default_template => "by_category",
+ group_name => "Unnamed Packaging Group",
+ group_url => "http://www.debian.org/",
+ wsvn_url => undef,
+ },
+ svn => {
+ repository => undef,
+ packages_path => "trunk",
+ post_path => ""
+ },
+ archive => {
+ mirror => "ftp://ftp.debian.org/debian",
+ suites => "unstable, testing, stable, oldstable, experimental",
+ sections => "main, contrib, non-free",
+ suites_ttl => "360, 360, 10080, 10080, 360",
+ new_url => 'http://ftp-master.debian.org/new.html',
+ new_ttl => 60,
+ incoming_url => 'http://incoming.debian.org',
+ incoming_ttl => 60,
+ },
+ watch => {
+ ttl => 360,
+ use_cpan => 1,
+ cpan_mirror => "ftp://cpan.org/ls-lR.gz",
+ cpan_ttl => 360 # 6 hours
+ },
+ bts => {
+ ttl => 360, # 6 hours
+ soap_proxy => 'http://bugs.debian.org/cgi-bin/soap.cgi',
+ soap_uri => 'Debbugs/SOAP',
+ ignore_keywords => "",
+ ignore_severities => ""
+ },
+ common => {
+ cache_dir => "$ENV{HOME}/.debianqa/yourgroup",
+ # verbosity level: error => 0, warn => 1, info => 2, debug => 3
+ # Should be 1 by default, 0 for quiet mode
+ verbose => 1,
+ # Prepend syslog-style format?
+ formatted_log => 1
+ }
+);
+our %CFG = %defaults; # Global configuration
+my %valid_cfg;
+foreach my $section (keys %defaults) {
+ $valid_cfg{$section} = { map({ $_ => 1 } keys(%{$defaults{$section}})) };
+}
+
+sub read_config(;$) {
+ my $file = shift;
+ unless($file) {
+ if($ENV{DEBIAN_QA_CONF}) {
+ $file = $ENV{DEBIAN_QA_CONF};
+ } elsif(-e "$ENV{HOME}/.debianqa/debianqa.conf") {
+ $file = "$ENV{HOME}/.debianqa/debianqa.conf";
+ } elsif(-e "/etc/debianqa.conf") {
+ $file = "/etc/debianqa.conf";
+ } elsif(-e "debianqa.conf") {
+ $file = "debianqa.conf";
+ } elsif(-e "$FindBin::Bin/debianqa.conf") {
+ $file = "$FindBin::Bin/debianqa.conf";
+ } else {
+ die "Can't find any configuration file!\n";
+ }
+ }
+ die "Can't read configuration file: $file\n" unless(-r $file);
+
+ my $section = "common";
+ open(CFG, "<", $file) or die "Can't open $file: $!\n";
+ while(<CFG>) {
+ chomp;
+ s/(?<!\S)[;#].*//;
+ s/\s+$//;
+ next unless($_);
+ if(/^\s*\[\s*(\w+)\s*\]\s*$/) {
+ $section = lc($1);
+ die "Invalid section in configuration file: $section\n" unless(
+ exists($valid_cfg{$section}));
+ next;
+ }
+ unless(/^\s*([^=]+?)\s*=\s*(.*)/) {
+ die "Unrecognised line in configuration file: $_\n";
+ }
+ my($key, $val) = ($1, $2);
+ unless(exists($valid_cfg{$section}{$key})) {
+ die("Unrecognised configuration parameter $key in section " .
+ "$section\n");
+ }
+ if($val =~ s/^~\///) { # UGLY!
+ $val = $ENV{HOME} . "/$val";
+ }
+ $CFG{$section}{$key} = $val;
+ }
+ close CFG;
+}
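+# A minimal example of the format read_config() accepts (the values are
+# illustrative only; see debianqa.conf-sample for the real template):
+#
+#   [common]
+#   cache_dir = ~/.debianqa/yourgroup
+#   verbose = 2
+#
+#   [svn]
+#   repository = svn://svn.example.org/svn/yourgroup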
+# Parses command line options, loads configuration file if specified, removes
+# arguments from @ARGV and returns a hash with the parsed options.
+# If $passthru, ignores unknown parameters and keeps them in @ARGV.
+# If $readconf, will call read_config even if the user didn't say --conf
+sub getopt_common(;$$) {
+ my($passthru, $readconf) = @_;
+ my($conffile, $force, $v, $q) = (undef, 0, 0, 0);
+ my $p = new Getopt::Long::Parser;
+ $p->configure(qw(no_ignore_case bundling),
+ $passthru ? ("pass_through") : ());
+ $p->getoptions(
+ 'conf|c=s' => \$conffile, 'force|f!' => \$force,
+ 'verbose|v:+' => \$v, 'quiet|q:+' => \$q
+ ) or die("Error parsing command-line arguments\n");
+ read_config($conffile) if($conffile or $readconf);
+ $CFG{common}{verbose} += $v - $q;
+ return {
+ force => $force # only one argument for now
+ };
+}
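+# Typical use from a script (a sketch; do_refresh() is a hypothetical
+# function of the caller):
+#
+#   use DebianQA::Config qw(%CFG read_config getopt_common);
+#   my $opts = getopt_common(0, 1);    # parse @ARGV, always read the config
+#   do_refresh() if($opts->{force});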
+1;
Added: trunk/community/qa/DebianQA/DebVersions.pm
===================================================================
--- trunk/community/qa/DebianQA/DebVersions.pm (rev 0)
+++ trunk/community/qa/DebianQA/DebVersions.pm 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,86 @@
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: DebVersions.pm 9707 2007-11-24 05:12:07Z tincho-guest $
+#
+# Routines for comparing package versions, based on policy + dpkg code
+# I'm not using AptPkg::Version since it depends on having a working apt and
+# dpkg, and because it's overly complicated and underdocumented.
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+
+package DebianQA::DebVersions;
+use strict;
+use warnings;
+use Carp;
+
+our @ISA = "Exporter";
+our @EXPORT = qw( deb_compare deb_compare_nofail );
+
+sub deb_parse($) {
+ my $v = shift;
+ unless(defined $v) {
+ carp "Empty debian package version passed";
+ return ();
+ }
+ unless($v =~ /^(?:(\d+):)?([A-Za-z0-9+.:~_-]*?)(?:-([+.~_A-Za-z0-9]+))?$/) {
+ warn "Invalid debian package version: $v\n";
+ return ();
+ };
+ return($1 || 0, $2, $3 || "");
+}
+sub dpkg_order($) {
+ my $v = shift;
+ return 0 if (! defined($v) or $v =~ /[0-9]/);
+ return -1 if ($v eq '~');
+ return ord($v) if ($v =~ /[a-zA-Z]/);
+ return ord($v) + 256;
+}
+sub deb_verrevcmp($$) {
+ my($a, $b) = @_;
+ my($x, $y);
+ while(length($a) or length($b)) {
+ while(1) {
+ $x = length($a) ? substr($a, 0, 1) : undef;
+ $y = length($b) ? substr($b, 0, 1) : undef;
+ last unless((defined $x and $x =~ /\D/) or
+ (defined $y and $y =~ /\D/));
+ my $r = dpkg_order($x) <=> dpkg_order($y);
+ return $r if($r);
+ substr($a, 0, 1, "") if(defined $x);
+ substr($b, 0, 1, "") if(defined $y);
+ }
+ $a =~ s/^(\d*)//;
+ $x = $1 || 0;
+ $b =~ s/^(\d*)//;
+ $y = $1 || 0;
+ my $r = $x <=> $y;
+ return $r if($r);
+ }
+ return 0;
+}
+sub deb_compare($$) {
+ my @va = deb_parse($_[0]) or return undef;
+ my @vb = deb_parse($_[1]) or return undef;
+
+ # Epoch
+ return $va[0] <=> $vb[0] unless($va[0] == $vb[0]);
+
+ my $upstreamcmp = deb_verrevcmp($va[1], $vb[1]);
+ return $upstreamcmp unless(defined $upstreamcmp and $upstreamcmp == 0);
+
+ return deb_verrevcmp($va[2], $vb[2]);
+}
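+# deb_compare() behaves like <=>: negative, zero or positive (and undef on
+# unparsable input). A few worked examples:
+#
+#   deb_compare("1.0-1", "1.0-2")     < 0   # newer Debian revision wins
+#   deb_compare("1.0~rc1-1", "1.0-1") < 0   # "~" sorts before everything
+#   deb_compare("1:0.9-1", "2.0-1")   > 0   # the epoch takes precedence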
+sub deb_compare_nofail($$) {
+ my @va = deb_parse($_[0]) or return 1;
+ my @vb = deb_parse($_[1]) or return -1;
+
+ # Epoch
+ return $va[0] <=> $vb[0] unless($va[0] == $vb[0]);
+
+ my $upstreamcmp = deb_verrevcmp($va[1], $vb[1]);
+ return $upstreamcmp unless(defined $upstreamcmp and $upstreamcmp == 0);
+
+ return deb_verrevcmp($va[2], $vb[2]);
+}
+
+1;
Added: trunk/community/qa/DebianQA/Svn.pm
===================================================================
--- trunk/community/qa/DebianQA/Svn.pm (rev 0)
+++ trunk/community/qa/DebianQA/Svn.pm 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,425 @@
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: Svn.pm 12767 2008-01-15 13:12:33Z tincho-guest $
+#
+# Module for retrieving data from the SVN repository. It understands SVN
+# revisions and uses them instead of timestamps for checking cache validity. It
+# parses changelog and watch files.
+#
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+
+package DebianQA::Svn;
+use strict;
+use warnings;
+
+our @ISA = "Exporter";
+our @EXPORT = (qw(
+ svn_download svn_get svn_get_consolidated
+ svndir2pkgname pkgname2svndir get_pkglist get_pkglist_hashref
+ ));
+
+use IO::Scalar;
+use Digest::MD5 "md5_hex";
+use Parse::DebianChangelog;
+use DebianQA::Cache;
+use DebianQA::Common;
+use DebianQA::Config '%CFG';
+use DebianQA::DebVersions;
+use Parse::DebControl;
+use SVN::Client;
+
+# Returns the list of changed directories
+sub svn_download {
+ my($force, $revision, @dirlist) = @_;
+ $force ||= 0;
+ $revision ||= 0;
+ debug("svn_download($force, $revision, (@dirlist))");
+
+ die "Missing SVN repository" unless($CFG{svn}{repository});
+ my $svnpath = $CFG{svn}{repository};
+
+ # Sanitise, as SVN::Client is too stupid
+ $svnpath =~ s{/+$}{};
+ $svnpath .= "/";
+ $svnpath .= $CFG{svn}{packages_path} if($CFG{svn}{packages_path});
+ $svnpath =~ s{/+$}{};
+ my $svnpostpath = $CFG{svn}{post_path} || "";
+ # Always has a slash if not empty
+ $svnpostpath =~ s{^/*(.*?)/*$}{/$1} if($svnpostpath);
+
+ my $complete = ! @dirlist;
+
+ our $svn = SVN::Client->new();
+ unless($revision) {
+ info("Retrieving last revision number from SVN");
+ $svn->info($svnpath, undef, "HEAD", sub {
+ $revision = $_[1]->rev();
+ }, 0);
+ }
+
+ if($complete) {
+ info("Retrieving list of directories in SVN");
+ my %dirlist = %{$svn->ls($svnpath, 'HEAD', 0)};
+ @dirlist = grep({ $dirlist{$_}->kind() == $SVN::Node::dir }
+ keys(%dirlist));
+ info(scalar @dirlist, " directories to process");
+ }
+ my(%changed, %svn);
+
+ if($force) {
+ %changed = map({ $_ => 1 } @dirlist);
+ } else {
+ my $cdata = read_cache("svn", "", 0);
+ if(find_stamp($cdata, "") == $revision
+ and keys(%$cdata) > @dirlist + 1) {
+ return (); # Cache is up-to-date
+ }
+
+ # Stamps from cache
+ my %cache_vers = map({ $_ => find_stamp($cdata, $_) }
+ grep({ $cdata->{$_} } @dirlist));
+ # Never updated
+ %changed = map({ $_ => 1 } grep( { not $cache_vers{$_} } @dirlist));
+
+ # Now search in the SVN log to see if there's any interesting change
+        # Remove the already-updated parts of the cache from the list,
+        # and also remove invalid dirs
+ my %invalid;
+ foreach my $dir (grep({ $cache_vers{$_}
+ and $cache_vers{$_} < $revision } @dirlist)) {
+ $dir =~ s{^/*(.*?)/*$}{$1};
+ my $pkghome = "$svnpath/$dir$svnpostpath";
+ safe_svn_op($svn, "log", [ $pkghome ], $cache_vers{$dir},
+ "HEAD", 1, 1, sub {
+ foreach (keys %{$_[0]}) {
+ $changed{$dir} = 1 if(m{/debian/(changelog|control|watch)$});
+ }
+ }) or $invalid{$dir} = 1;
+ }
+ foreach(keys %invalid) {
+ info("Removing invalid $_ directory");
+ $svn{$_} = {};
+ }
+        # Copy the unchanged dirs whose stamp should be bumped
+ foreach(grep({ ! $changed{$_} } @dirlist)) {
+ $svn{$_} = $cdata->{$_} if($cdata->{$_});
+ }
+ }
+ my @changed = keys %changed;
+ foreach my $dir (@changed) {
+ $dir =~ s{^/*(.*?)/*$}{$1};
+ my $debdir = "$svnpath/$dir$svnpostpath/debian";
+ $svn{$dir} = {};
+
+ info("Retrieving control information for $dir");
+ my $control = get_svn_file($svn, "$debdir/control");
+
+ unless($control) {
+ $svn{$dir}{error} = "MissingControl";
+ # Check if it's an invalid dir
+ safe_svn_op($svn, "ls", $debdir, 'HEAD', 0) and next;
+ info("Removing invalid $dir directory");
+ $svn{$dir} = {};
+ next;
+ }
+
+ info("Retrieving changelog for $dir");
+ my $changelog = get_svn_file($svn, "$debdir/changelog");
+
+ unless($changelog) {
+ $svn{$dir}{error} = "MissingChangelog";
+ next;
+ }
+
+        # Parse::DebControl hands back an arrayref of hashrefs: [0] holds the
+        # debian/control fields of the source package, and [1] onwards hold
+        # the fields of each binary package (all of which are processed in
+        # the loop below)
+ my ($ctrl_data, $short, $long);
+ $control =~ s/^#.*\n//gm; # stripComments looks like nonsense to me
+ $ctrl_data = Parse::DebControl->new->parse_mem($control, {
+            discardCase => 1 # field-name lookups are unreliable if we don't
+ });
+ ($short, $long) = split_description($ctrl_data->[1]{description});
+
+ $svn{$dir}{pkgname} = $ctrl_data->[0]{source};
+ my @section = split(/\s*\/\s*/, $ctrl_data->[0]{section});
+ unshift @section, "main" unless(@section > 1);
+ $svn{$dir}{section} = $section[0];
+ $svn{$dir}{subsection} = $section[1];
+ $svn{$dir}{uploaders} = $ctrl_data->[0]{uploaders};
+ $svn{$dir}{maintainer} = $ctrl_data->[0]{maintainer};
+ $svn{$dir}{std_version} = $ctrl_data->[0]{'standards-version'};
+ $svn{$dir}{b_d} = $ctrl_data->[0]{'build-depends'};
+ $svn{$dir}{b_d_i} = $ctrl_data->[0]{'build-depends-indep'};
+ $svn{$dir}{short_descr} = $short;
+ $svn{$dir}{long_descr} = $long;
+ my %bins;
+ foreach(1..$#$ctrl_data) {
+ my $bin = $ctrl_data->[$_];
+ my ($shd, $lnd) = split_description($bin->{description});
+ $svn{$dir}{bindata}[$_-1] = {
+ %$bin,
+ short_descr => $shd,
+ long_descr => $lnd,
+ };
+ delete $svn{$dir}{bindata}[$_-1]{description};
+ $bins{$bin->{package}} = 1;
+ if($bin->{provides}) {
+ foreach(split(/\s*,\s*/, $bin->{provides})) {
+ $bins{$_} = 1;
+ }
+ }
+ }
+ $svn{$dir}{binaries} = [ sort keys %bins ];
+ my $parser = Parse::DebianChangelog->init({
+ instring => $changelog });
+        my $error = $parser->get_error() || $parser->get_parse_errors();
+ if($error) {
+ error($error);
+ $svn{$dir}{error} = "InvalidChangelog";
+ next;
+ }
+
+ my($lastchl, $unfinishedchl);
+ foreach($parser->data()) {
+ if($_->Distribution =~ /^(?:unstable|experimental)$/) {
+ $lastchl = $_;
+ last;
+ }
+ if(! $unfinishedchl and $_->Distribution eq "UNRELEASED") {
+ $unfinishedchl = $_;
+ }
+ }
+ unless($lastchl or $unfinishedchl) {
+ $svn{$dir}{error} = "InvalidChangelog";
+ next;
+ }
+ if($lastchl) {
+ $svn{$dir}{version} = $lastchl->Version;
+ $svn{$dir}{date} = $lastchl->Date;
+ $svn{$dir}{changer} = $lastchl->Maintainer;
+ $svn{$dir}{text} = join(
+ "\n",
+ map( $lastchl->$_, qw(Header Changes Trailer) ),
+ );
+ }
+ if($unfinishedchl) {
+ $svn{$dir}{un_version} = $unfinishedchl->Version;
+ $svn{$dir}{un_date} = $unfinishedchl->Date;
+ $svn{$dir}{un_changer} = $unfinishedchl->Maintainer;
+ $svn{$dir}{un_text} = join(
+ "\n",
+ map( $unfinishedchl->$_, qw(Header Changes Trailer) ),
+ );
+ }
+ if($svn{$dir}{pkgname} ne $parser->dpkg()->{Source}) {
+ $svn{$dir}{error} = "SourceNameMismatch";
+ next;
+ }
+
+ info("Retrieving watchfile for $dir");
+ my $watchdata = get_svn_file($svn, "$debdir/watch");
+ unless($watchdata) {
+ if($svn{$dir}{version} and $svn{$dir}{version} !~ /-/) {
+ $svn{$dir}{watch_error} = "Native";
+ } else {
+ $svn{$dir}{watch_error} = "Missing";
+ }
+ next;
+ }
+ my $watch = parse_watch($svn{$dir}{version}, $watchdata);
+ # Returns undef on error
+ unless($watch and @$watch) {
+ $svn{$dir}{watch_error} = "Invalid";
+ next;
+ }
+ my @versions = sort({ deb_compare_nofail($a, $b) }
+ grep(defined, map({ $_->{mangled_ver} } @$watch)));
+
+ $svn{$dir}{mangled_ver} = $versions[-1];
+ $svn{$dir}{watch} = $watch;
+
+ # Again for unreleased
+ $watch = parse_watch($svn{$dir}{un_version}, $watchdata) if(
+ $svn{$dir}{un_version});
+ # Returns undef on error
+ if($watch and @$watch) {
+ @versions = sort({ deb_compare_nofail($a, $b) }
+ grep(defined, map({ $_->{mangled_ver} } @$watch)));
+ $svn{$dir}{mangled_un_ver} = $versions[-1];
+ }
+ }
+ # Retain lock
+ my $cdata = update_cache("svn", \%svn, "", $complete, 1, $revision);
+
+ my @pkglist = grep({ ref $cdata->{$_} and $cdata->{$_}{pkgname} }
+ keys(%$cdata));
+ my %pkglist;
+ foreach(@pkglist) {
+ $pkglist{$cdata->{$_}{pkgname}} = {
+ svndir => $_,
+ binaries => $cdata->{$_}{binaries}
+ };
+ }
+ update_cache("consolidated", \%pkglist, "pkglist", 1, 1);
+ my %svn2;
+ foreach(keys(%$cdata)) {
+ next unless ref($cdata->{$_});
+ my $pkgname = $cdata->{$_}{pkgname} or next;
+ # Shallow copy, it's enough here, but can't be used for anything else
+ $svn2{$pkgname} = { %{$cdata->{$_}} };
+ $svn2{$pkgname}{dir} = $_;
+ delete $svn2{$pkgname}{$_} foreach(
+ qw(watch pkgname text un_text long_descr bindata)
+ );
+ }
+ update_cache("consolidated", \%svn2, "svn", 1, 0);
+ unlock_cache("svn");
+ return @changed;
+}
+# Returns the hash of svn info. Doesn't download anything.
+sub svn_get {
+ return read_cache("svn", shift, 0);
+}
+# Returns the consolidated hash of svn info. Doesn't download anything.
+sub svn_get_consolidated {
+ my $path = shift || "";
+ return read_cache("consolidated", "svn/$path", 0);
+}
+# Looks up the source package name for a given SVN directory name
+# Returns undef if not found
+sub svndir2pkgname($) {
+ my $dir = shift;
+ my $data = read_cache("svn", $dir, 0);
+ return $data->{pkgname};
+}
+# Looks up the SVN directory name for a given source package name
+# Returns undef if not found
+sub pkgname2svndir($) {
+ my $pkg = shift;
+ my $data = read_cache("svn", "", 0);
+ my @dirs = grep({ ref $data->{$_} and $data->{$_}{pkgname} and
+ $data->{$_}{pkgname} eq $pkg } keys %$data);
+ return $dirs[0] if(@dirs);
+ return undef;
+}
+# Returns the list of source packages detected in the svn repository
+sub get_pkglist {
+ my $list = get_pkglist_hashref();
+ return keys %$list;
+}
+sub get_pkglist_hashref {
+ my $list = read_cache("consolidated", "pkglist", 0);
+ foreach(grep({ /^\// } keys %$list)) {
+ delete $list->{$_};
+ }
+ return $list;
+}
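+# Usage sketch for the lookup helpers above (the package name is hypothetical
+# and assumes svn_download() has already populated the caches):
+#
+#   svn_download(0, 0);                        # refresh if the repo changed
+#   my @pkgs = get_pkglist();
+#   my $dir  = pkgname2svndir("libfoo-perl");  # svn directory, or undef
+#   my $info = svn_get_consolidated("libfoo-perl");
+#   print "$info->{version} in $info->{dir}\n" if($info->{version});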
+# Parses watchfile, returns an arrayref containing one element for each source,
+# consisting of the URL spec, an MD5 sum of the line (to detect changes from
+# the watch module), the mangled debian version, and a hash of options.
+sub parse_watch($$) {
+ my($version, $watch) = @_;
+ $version ||= '';
+ $watch ||= '';
+ debug("parse_watch('$version', '...')");
+ $watch =~ s/\\\n//gs;
+
+ # Strip epoch and debian release
+ $version =~ s/^(?:\d+:)?(.+?)(?:-[^-]+)?$/$1/;
+
+ my @watch_lines = split(/\n/, $watch);
+ @watch_lines = grep((!/^#/ and !/^version\s*=/ and !/^\s*$/),
+ @watch_lines);
+
+ my @wspecs;
+ foreach(@watch_lines) {
+ debug("Watch line: $_");
+
+        # opts either contains no spaces or is enclosed in double quotes
+ my $opts = $1 if(s!^\s*opts="([^"]*)"\s+!! or
+ s!^\s*opts=(\S*)\s+!!);
+ debug("Watch line options: $opts") if($opts);
+
+        # multiple options are separated by commas; commas are not allowed
+        # within an option
+ my @opts = split(/\s*,\s*/, $opts) if($opts);
+ my %opts;
+ foreach(@opts) {
+ next if /^(?:active|passive|pasv)$/;
+ /([^=]+)=(.*)/;
+ my($k, $v) = ($1, $2);
+ debug("Watch option $k: $v");
+ if($k eq 'versionmangle') {
+ push @{$opts{uversionmangle}}, $v;
+ push @{$opts{dversionmangle}}, $v;
+ } else {
+ push @{$opts{$k}}, $v;
+ }
+ }
+ my $mangled = $version;
+ if($version and $opts{dversionmangle}) {
+ foreach(split(/;/, join(";", @{$opts{dversionmangle}}))) {
+ debug("Executing \$mangled =~ $_");
+ eval "\$mangled =~ $_";
+ if($@) {
+ error("Invalid watchfile: $@");
+ return undef;
+ }
+ }
+ }
+ debug("Mangled version: $mangled");
+ push @wspecs, {
+ line => $_,
+ mangled_ver => $mangled,
+ md5 => md5_hex(($opts || "").$_),
+ opts => \%opts
+ };
+ }
+ return \@wspecs;
+}
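+# A sketch of the result for a single, hypothetical watch file:
+#
+#   my $specs = parse_watch("1.23-1",
+#       "version=3\nhttp://example.org/foo foo-(.+)\\.tar\\.gz\n");
+#   # $specs->[0]{mangled_ver} eq "1.23", and $specs->[0]{opts} is an empty
+#   # hashref because the line carries no opts= prefix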
+sub get_svn_file($$) {
+ my($svn, $target) = @_;
+ my $svn_error;
+ my $data;
+ my $fh = IO::Scalar->new(\$data);
+ safe_svn_op($svn, "cat", $fh, $target , 'HEAD');
+ return $data;
+}
+sub safe_svn_op($$@) {
+ my($svn, $op, @opts) = @_;
+ local $SVN::Error::handler = undef;
+ my ($svn_error) = eval "\$svn->$op(\@opts)";
+ die $@ if($@);
+ if(SVN::Error::is_error($svn_error)) {
+ if($svn_error->apr_err() == $SVN::Error::FS_NOT_FOUND) {
+ $svn_error->clear();
+ return 0;
+ } else {
+ SVN::Error::croak_on_error($svn_error);
+ }
+ }
+ return 1;
+}
+
+sub split_description($) {
+ # The 'description' field in debian/control is, IMHO, wrongly handled - Its
+ # first line is the short description, and the rest (second to last lines)
+ # is the long description. So... Here we just split it, for proper
+ # handling.
+ #
+ # Gets the full description as its only parameter, returns the short and
+ # the long descriptions.
+ my ($str, $offset, $short, $long);
+ $str = shift;
+ $offset = index($str, "\n");
+ $short = substr($str, 0, $offset);
+ $long = substr($str, $offset+1);
+ return ($short, $long);
+}
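+# For example (hypothetical input):
+#   split_description("parse foo files\n This module parses foo files.\n")
+# returns ("parse foo files", " This module parses foo files.\n").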
+
+1;
Added: trunk/community/qa/DebianQA/Watch.pm
===================================================================
--- trunk/community/qa/DebianQA/Watch.pm (rev 0)
+++ trunk/community/qa/DebianQA/Watch.pm 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,443 @@
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: Watch.pm 11498 2007-12-23 10:41:25Z tincho-guest $
+#
+# Module for scanning watch files and checking upstream versions.
+#
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+
+package DebianQA::Watch;
+use strict;
+use warnings;
+
+our @ISA = "Exporter";
+our @EXPORT = qw(watch_download watch_get watch_get_consolidated);
+
+use Compress::Zlib ();
+use CPAN::DistnameInfo;
+use DebianQA::Cache;
+use DebianQA::Common;
+use DebianQA::Config '%CFG';
+use DebianQA::Svn;
+use DebianQA::DebVersions;
+use Fcntl qw(:seek);
+use LWP::UserAgent;
+
+my $cpanregex = qr#^((?:http|ftp)://\S*(?:cpan|backpan)\S*)/(dist|modules/by-module|(?:by-)?authors/id)\b#i;
+
+my $ua = new LWP::UserAgent;
+$ua->timeout(10);
+$ua->env_proxy;
+
+sub watch_download {
+ my($force, @pkglist) = @_;
+ $force ||= 0;
+ debug("watch_download($force, (@pkglist))");
+
+ if($CFG{watch}{use_cpan}) {
+ cpan_dist_download($force);
+ cpan_index_download($force);
+ }
+ my $complete;
+ if(not @pkglist) {
+ $complete = 1;
+ @pkglist = grep(! /^\//, get_pkglist());
+ }
+ my $cdata = watch_get() unless($force);
+ my(%watch, %watch2, @not_updated);
+ foreach my $pkg (@pkglist) {
+ debug("Retrieving svn info for $pkg");
+ my $svndata = svn_get(pkgname2svndir($pkg));
+ if($svndata->{watch_error}) {
+ $watch2{$pkg} = { error => $svndata->{watch_error} };
+ next;
+ }
+ unless($svndata->{watch} and ref $svndata->{watch}
+ and ref $svndata->{watch} eq "ARRAY") {
+ $watch2{$pkg} = { error => "Missing" };
+ next;
+ }
+ my @wlines = @{$svndata->{watch}};
+ unless(@wlines) {
+ $watch2{$pkg} = { error => "Empty" };
+ next;
+ }
+ my @wresult;
+ foreach my $wline (@wlines) {
+ my $md5 = $wline->{md5};
+ next unless($md5);
+ if(not $force and $cdata->{$md5} and
+ $CFG{watch}{ttl} * 60 > time - find_stamp($cdata, $md5)) {
+ $watch{$md5} = $cdata->{$md5};
+ push @not_updated, $md5;
+ } else {
+ my ($watcherr, %uscand) = uscan($wline->{line},
+ %{$wline->{opts}});
+ if($watcherr) {
+ warn("Error while processing $pkg watch file: $watcherr");
+ } else {
+ info("Found: version $uscand{upstream_version} ",
+ "from $uscand{upstream_url} ",
+ "(mangled: $uscand{upstream_mangled})");
+ }
+ $watch{$md5} = { watch_error => $watcherr, %uscand };
+ }
+ my $diff = 0;
+ if(not $watch{$md5}{upstream_mangled}) {
+ $watch{$md5}{watch_error} ||= "Error";
+ } elsif($wline->{mangled_ver}) {
+ $diff = deb_compare($wline->{mangled_ver},
+ $watch{$md5}{upstream_mangled});
+ $watch{$md5}{watch_error} = "InvalidVersion" unless(
+ defined $diff);
+ }
+ push @wresult, { diff => $diff, %{$watch{$md5}} };
+ }
+ my @noerror = grep({ not $_->{watch_error} } @wresult);
+ @noerror = sort({
+ deb_compare_nofail($a->{upstream_mangled},
+ $b->{upstream_mangled}) } @noerror);
+ unless(@noerror) {
+ $watch2{$pkg} = { error => $wresult[0]{watch_error} };
+ next;
+ }
+ my @result;
+ if(@result = grep({ $_->{diff} < 0 } @noerror)) {
+ $watch2{$pkg} = $result[-1];
+ } elsif(@result = grep( { not $_->{diff} } @noerror)) {
+ $watch2{$pkg} = $result[0];
+ } else {
+ $watch2{$pkg} = $noerror[0];
+ }
+ delete($watch2{$pkg}{diff}) unless($watch2{$pkg}{diff});
+ delete($watch2{$pkg}{watch_error}) unless($watch2{$pkg}{watch_error});
+ }
+ delete $watch{$_} foreach(@not_updated);
+ update_cache("watch", \%watch, "", $complete && @not_updated == 0, 1);
+ update_cache("consolidated", \%watch2, "watch", $complete, 0);
+ unlock_cache("watch");
+ info("watch: ", scalar @pkglist, " packages scanned");
+}
+# Returns the hash of watch results. Doesn't download anything.
+sub watch_get {
+ return read_cache("watch", shift, 0);
+}
+# Returns the consolidated hash of watch results. Doesn't download anything.
+sub watch_get_consolidated {
+ my $path = shift || "";
+ return read_cache("consolidated", "watch/$path", 0);
+}
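+# A minimal sketch of the per-package result stored under "watch" in the
+# consolidated cache (the package name and versions are hypothetical):
+#
+#   watch_download(0, "libfoo-perl");
+#   my $w = watch_get_consolidated("libfoo-perl");
+#   # Either $w->{error} is set, or $w->{upstream_version},
+#   # $w->{upstream_mangled} and $w->{upstream_url} describe the newest
+#   # upstream tarball found by uscan() below.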
+sub uscan($%) {
+ my($wline, %opts) = @_;
+ info("Processing watch line $wline");
+
+ $wline =~ s{^http://sf\.net/(\S+)}{http://qa.debian.org/watch/sf.php/$1};
+ # Fix URIs with no path
+ $wline =~ s{^(\w+://[^\s/]+)(\s|$)}{$1/$2};
+ unless($wline =~ m{^(?:(?:https?|ftp)://\S+?)/}) {
+ warn("Invalid watch line: $wline");
+ return("Invalid");
+ }
+ my @items = split(/\s+/, $wline);
+
+ my($dir, $filter);
+ # Either we have single URL/pattern
+ # or URL/pattern + extra
+ if($items[0] =~ /\(/) {
+ # Since '+' is greedy, the second capture has no slashes
+ ($dir, $filter) = $items[0] =~ m{^(.+)/(.+)$};
+ } elsif(@items >= 2 and $items[1] =~ /\(/) {
+ # or, we have a homepage plus pattern
+ # (plus optional other non-interesting stuff)
+ ($dir, $filter) = @items[0,1];
+ }
+ unless($dir and $filter) {
+ return("Invalid");
+ }
+ debug("uscan $dir $filter");
+ my @vers;
+ if($CFG{watch}{use_cpan} and $dir =~ $cpanregex) {
+ @vers = cpan_lookup($dir, $filter);
+ my $status = shift @vers;
+ if($status) {
+ warn("CPAN lookup failed for $dir + $filter: $status");
+ return $status;
+ } elsif(not @vers) {
+ warn("CPAN lookup failed for $dir + $filter");
+ }
+ }
+ unless(@vers) {
+ @vers = recurse_dirs($filter, $dir, "");
+ my $status = shift @vers;
+ return $status || "NotFound" unless(@vers);
+ }
+
+ my @mangled;
+ foreach my $uver (@vers) {
+ push @mangled, $uver->{upstream_version};
+ next unless($opts{uversionmangle});
+ debug("Mangle option: ", join(", ", @{$opts{uversionmangle}}));
+ foreach(split(/;/, join(";", @{$opts{uversionmangle}}))) {
+ debug("Executing '\$mangled[-1] =~ $_'");
+ eval "\$mangled[-1] =~ $_";
+ if($@) {
+ error("Invalid watchfile: $@");
+ return("Invalid");
+ }
+ }
+ debug("Mangled version: $mangled[-1]");
+ }
+ my @order = sort({ deb_compare_nofail($mangled[$a], $mangled[$b]) }
+ (0..$#vers));
+ return(undef,
+ %{$vers[$order[-1]]},
+ upstream_mangled => $mangled[$order[-1]]);
+}
+sub recurse_dirs($$$);
+sub recurse_dirs($$$) {
+ my($filter, $base, $remaining) = @_;
+ debug("recurse_dirs($filter, $base, $remaining)");
+
+ if($base =~ s{/([^/]*?\(.*)}{}) {
+ $remaining = "$1/$remaining";
+ }
+ my @rparts = split(/\/+/, $remaining) if($remaining);
+ while(@rparts and $rparts[0] !~ /\(/) {
+ $base .= "/" . shift @rparts;
+ }
+ if(@rparts) {
+ my ($status, @data) = recurse_dirs($rparts[0]."/?", $base, "");
+ return $status unless(@data);
+ @data = sort({ deb_compare_nofail($a->{upstream_version},
+ $b->{upstream_version}) } @data);
+ $base = $data[-1]{upstream_url};
+ }
+ unless($base =~ m{(^\w+://[^/]+)(/.*?)/*$}) {
+ error("Invalid base: $base");
+ return("Invalid");
+ }
+ my $site = $1;
+ my $path = $2;
+ my $pattern;
+ if($filter =~ m{^/}) {
+ $pattern = qr{(?:^\Q$site\E)?$filter};
+ } elsif($filter !~ m{^\w+://}) {
+ $pattern = qr{(?:(?:^\Q$site\E)?\Q$path\E/)?$filter};
+ } else {
+ $pattern = $filter;
+ }
+
+ debug("Downloading $base");
+ my $res = $ua->get($base);
+ unless($res->is_success) {
+ error("Unable to get $base: " . $res->message());
+ return ("NotFound") if($res->code == 404);
+ return ("DownloadError");
+ }
+ my $page = $res->decoded_content();
+ $page =~ s/<!--.*?-->//gs;
+ $page =~ s/\n+/ /gs;
+
+ my @candidates;
+ if($base =~ /^ftp/) {
+ @candidates = split(/\s+/, $page);
+ } else {
+ @candidates = grep(defined, ($page =~
+ m{<a\s[^>]*href\s*=\s*(?:"([^"]+)"|'([^']+)'|([^"]\S+))}gi));
+ }
+ my @vers;
+ foreach my $url (grep(m{^$pattern$}, @candidates)) {
+ $url =~ m{^$pattern$};
+ my @ver = map({substr($url, $-[$_], $+[$_] - $-[$_])} (1..$#+));
+ if($url =~ m{^/}) {
+ $url = $site . $url;
+ } elsif($url !~ m{^\w+://}) {
+ $url = $site . $path . "/" . $url;
+ }
+ push @vers, {
+ upstream_version => join(".", @ver),
+ upstream_url => $url };
+ }
+ debug("Versions found: ", join(", ", map({ $_->{upstream_version} }
+ @vers)));
+ return(undef, @vers);
+}
+
+sub cpan_lookup($$) {
+ my($dir, $filter) = @_;
+
+ $dir =~ $cpanregex or return ();
+ my $base = $1;
+ my $type = $2;
+ $dir =~ s{/+$}{};
+ my $origdir = $dir;
+
+ $type =~ s/.*(dist|modules|authors).*/$1/ or return ();
+ my $cpan;
+ if($type eq "dist") {
+ $filter =~ s/.*\///;
+ $cpan = cpan_dist_download();
+ } else {
+ $cpan = cpan_index_download()->{$type};
+ }
+ $dir =~ s/$cpanregex//i;
+ $dir =~ s{^/+}{};
+ debug("Looking for $dir + $filter into CPAN $type cache");
+ #return ("NotFound") unless(exists($cpan->{$dir}));
+ # Allow this to gracefully degrade to a normal uscan check
+ return () unless(exists($cpan->{$dir}));
+
+ my @res;
+ foreach(keys %{$cpan->{$dir}}) {
+ next unless ($_ =~ $filter);
+ my $filt_ver = $1;
+ if($type eq "dist") {
+ my $cpan_ver = $cpan->{$dir}{$_}{version};
+ if($filt_ver ne $cpan_ver) {
+ # Try to remove initial "v"s, if any
+ $cpan_ver =~ s/^v//;
+ }
+ if($filt_ver ne $cpan_ver) {
+ warn("Version mismatch: uscan says $filt_ver, ",
+ "cpan says $cpan_ver");
+ return ("VersionMismatch");
+ }
+ }
+ push @res, {
+ upstream_version => $filt_ver,
+ upstream_url => (
+ $type eq "dist" ?
+ "$base/CPAN/authors/id/" . $cpan->{$dir}{$_}{path} :
+ "$origdir/$_"
+ )
+ };
+ }
+ # Allow this to gracefully degrade to a normal uscan check
+ #return ("NotFound") unless(@res);
+ return (undef, @res);
+}
+sub cpan_dist_download(;$) {
+ my $force = shift;
+ unless($force) {
+ my $cpan = read_cache("cpan_dists", "", 0);
+ if($CFG{watch}{cpan_ttl} * 60 > time - find_stamp($cpan, "")) {
+ return $cpan;
+ }
+ }
+
+ my $url = $CFG{watch}{cpan_mirror} . "/modules/02packages.details.txt.gz";
+ info("Rebuilding CPAN dists cache from $url");
+ open(TMP, "+>", undef) or die $!;
+ my $res = $ua->get($url, ":content_cb" => sub {
+ print TMP $_[0] or die $!;
+ });
+ unless($res->is_success()) {
+ warn "Can't download $url: " . $res->message();
+ return 0;
+ }
+ seek(TMP, 0, SEEK_SET) or die "Can't seek: $!\n";
+ my $gz = Compress::Zlib::gzopen(\*TMP, "rb")
+ or die "Can't open compressed file: $!\n";
+
+ my $data;
+ open($data, "+>", undef) or die $!;
+ my $buffer = " " x 4096;
+ my $bytes;
+ while(($bytes = $gz->gzread($buffer)) > 0) {
+ print $data $buffer;
+ }
+ die $gz->gzerror if($bytes < 0);
+ close TMP;
+ #my $z = new IO::Uncompress::Gunzip(\$data);
+
+ seek($data, 0, SEEK_SET) or die "Can't seek: $!\n";
+
+ # Skip header
+ while(<$data>) {
+ chomp;
+ last if(/^$/);
+ }
+ my $cpan = {};
+ while(<$data>) {
+ chomp;
+ my $tarball = (split)[2];
+ my $distinfo = new CPAN::DistnameInfo($tarball);
+# next if($distinfo->maturity() eq "developer");
+ my $distname = $distinfo->dist();
+ unless($distname) {
+ info("Invalid CPAN distribution: $tarball");
+ next;
+ }
+ my $version = $distinfo->version();
+ my $filename = $distinfo->filename();
+
+ $cpan->{$distname}{$filename} = {
+ path => $tarball,
+ version => $version
+ };
+ }
+ close $data;
+ update_cache("cpan_dists", $cpan, "", 1);
+ return $cpan;
+}
+sub cpan_index_download(;$) {
+ my $force = shift;
+ unless($force) {
+ my $cpan = read_cache("cpan_index", "", 0);
+ if($CFG{watch}{cpan_ttl} * 60 > time - find_stamp($cpan, "")) {
+ return $cpan;
+ }
+ }
+
+ my $url = $CFG{watch}{cpan_mirror} . "/indices/ls-lR.gz";
+ info("Rebuilding CPAN indices cache from $url");
+ open(TMP, "+>", undef) or die $!;
+ my $res = $ua->get($url, ":content_cb" => sub {
+ print TMP $_[0] or die $!;
+ });
+ unless($res->is_success()) {
+ warn "Can't download $url: " . $res->message();
+ return 0;
+ }
+ seek(TMP, 0, SEEK_SET) or die "Can't seek: $!\n";
+ my $gz = Compress::Zlib::gzopen(\*TMP, "rb")
+ or die "Can't open compressed file: $!\n";
+
+ my $data;
+ open($data, "+>", undef) or die $!;
+ my $buffer = " " x 4096;
+ my $bytes;
+ while(($bytes = $gz->gzread($buffer)) > 0) {
+ print $data $buffer;
+ }
+ die $gz->gzerror if($bytes < 0);
+ close TMP;
+ #my $z = new IO::Uncompress::Gunzip(\$data);
+
+ seek($data, 0, SEEK_SET) or die "Can't seek: $!\n";
+
+ my $cpan = {};
+ my($dir, $type);
+ while(<$data>) {
+ chomp;
+ if(/^(.+):$/) {
+ my $subdir = $1;
+ $type = undef;
+ $subdir =~ m{/.*(authors/id|modules/by-module)/+(.*?)/*$} or next;
+ $dir = $2;
+ $1 =~ /(authors|modules)/ and $type = $1;
+ next;
+ }
+ next unless($type and /^[-l]r.....r/);
+ s/ -> .*//;
+ my $file = (split)[8];
+ $file =~ m{\.(?:bz2|gz|zip|pl|pm|tar|tgz)$}i or next;
+ $cpan->{$type}{$dir}{$file} = 1;
+ }
+ close $data;
+ update_cache("cpan_index", $cpan, "", 1);
+ return $cpan;
+}
+1;
Added: trunk/community/qa/Parse/DebControl.pm
===================================================================
--- trunk/community/qa/Parse/DebControl.pm (rev 0)
+++ trunk/community/qa/Parse/DebControl.pm 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,811 @@
+package Parse::DebControl;
+
+###########################################################
+# Parse::DebControl - Parse debian-style control
+# files (and other colon key-value fields)
+#
+# Copyright 2003 - Jay Bonci <jaybonci at cpan.org>
+# Licensed under the same terms as perl itself
+#
+###########################################################
+
+use strict;
+use IO::Scalar;
+use Compress::Zlib;
+use LWP::UserAgent;
+
+use vars qw($VERSION);
+$VERSION = '2.005';
+
+sub new {
+ my ($class, $debug) = @_;
+ my $this = {};
+
+ my $obj = bless $this, $class;
+ if($debug)
+ {
+ $obj->DEBUG();
+ }
+ return $obj;
+};
+
+sub parse_file {
+ my ($this, $filename, $options) = @_;
+ unless($filename)
+ {
+ $this->_dowarn("parse_file failed because no filename parameter was given");
+ return;
+ }
+
+ my $fh;
+ unless(open($fh,"$filename"))
+ {
+ $this->_dowarn("parse_file failed because $filename could not be opened for reading");
+ return;
+ }
+
+ return $this->_parseDataHandle($fh, $options);
+};
+
+sub parse_mem {
+ my ($this, $data, $options) = @_;
+
+ unless($data)
+ {
+ $this->_dowarn("parse_mem failed because no data was given");
+ return;
+ }
+
+ my $IOS = new IO::Scalar \$data;
+
+ unless($IOS)
+ {
+ $this->_dowarn("parse_mem failed because IO::Scalar creation failed.");
+ return;
+ }
+
+ return $this->_parseDataHandle($IOS, $options);
+
+};
+
+sub parse_web {
+ my ($this, $url, $options) = @_;
+
+ unless($url)
+ {
+ $this->_dowarn("No url given, thus no data to parse");
+ return;
+ }
+
+ my $ua = LWP::UserAgent->new;
+
+ my $request = HTTP::Request->new(GET => $url);
+
+ unless($request)
+ {
+ $this->_dowarn("Failed to instantiate HTTP Request object");
+ return;
+ }
+
+ my $response = $ua->request($request);
+
+ if ($response->is_success) {
+ return $this->parse_mem($response->content(), $options);
+ } else {
+ $this->_dowarn("Failed to fetch $url from the web");
+ return;
+ }
+}
+
+sub write_file {
+ my ($this, $filenameorhandle, $dataorarrayref, $options) = @_;
+
+ unless($filenameorhandle)
+ {
+ $this->_dowarn("write_file failed because no filename or filehandle was given");
+ return;
+ }
+
+ unless($dataorarrayref)
+ {
+ $this->_dowarn("write_file failed because no data was given");
+ return;
+ }
+
+ my $handle = $this->_getValidHandle($filenameorhandle, $options);
+
+ unless($handle)
+ {
+ $this->_dowarn("write_file failed because we couldn't negotiate a valid handle");
+ return;
+ }
+
+ my $string = $this->write_mem($dataorarrayref, $options);
+ $string ||= "";
+
+ print $handle $string;
+ close $handle;
+
+ return length($string);
+}
+
+sub write_mem {
+ my ($this, $dataorarrayref, $options) = @_;
+
+ unless($dataorarrayref)
+ {
+ $this->_dowarn("write_mem failed because no data was given");
+ return;
+ }
+
+ my $arrayref = $this->_makeArrayref($dataorarrayref);
+
+ my $string = $this->_makeControl($arrayref);
+
+ $string .= "\n" if $options->{addNewline};
+
+ $string = Compress::Zlib::memGzip($string) if $options->{gzip};
+
+ return $string;
+}
+
+sub DEBUG
+{
+ my($this, $verbose) = @_;
+ $verbose = 1 unless(defined($verbose) and int($verbose) == 0);
+ $this->{_verbose} = $verbose;
+ return;
+
+}
+
+sub _getValidHandle {
+ my($this, $filenameorhandle, $options) = @_;
+
+ if(ref $filenameorhandle eq "GLOB")
+ {
+ unless($filenameorhandle->opened())
+ {
+ $this->_dowarn("Can't get a valid filehandle to write to, because that is closed");
+ return;
+ }
+
+ return $filenameorhandle;
+ }else
+ {
+ my $openmode = ">>";
+ $openmode=">" if $options->{clobberFile};
+ $openmode=">>" if $options->{appendFile};
+
+ my $handle;
+
+ unless(open $handle,"$openmode$filenameorhandle")
+ {
+ $this->_dowarn("Couldn't open file: $openmode$filenameorhandle for writing");
+ return;
+ }
+
+ return $handle;
+ }
+}
+
+sub _makeArrayref {
+ my ($this, $dataorarrayref) = @_;
+
+ if(ref $dataorarrayref eq "ARRAY")
+ {
+ return $dataorarrayref;
+ }else{
+ return [$dataorarrayref];
+ }
+}
+
+sub _makeControl
+{
+ my ($this, $dataorarrayref) = @_;
+
+ my $str = "";
+
+ foreach my $stanza(@$dataorarrayref)
+ {
+ foreach my $key(keys %$stanza)
+ {
+ $stanza->{$key} ||= "";
+
+ my @lines = split("\n", $stanza->{$key});
+ if (@lines) {
+ $str.="$key\: ".(shift @lines)."\n";
+ } else {
+ $str.="$key\:\n";
+ }
+
+ foreach(@lines)
+ {
+ if($_ eq "")
+ {
+ $str.=" .\n";
+ }
+ else{
+ $str.=" $_\n";
+ }
+ }
+
+ }
+
+ $str ||= "";
+ $str.="\n";
+ }
+
+ chomp($str);
+ return $str;
+
+}
+
+sub _parseDataHandle
+{
+ my ($this, $handle, $options) = @_;
+
+ my $structs;
+
+ unless($handle)
+ {
+ $this->_dowarn("_parseDataHandle failed because no handle was given. This is likely a bug in the module");
+ return;
+ }
+
+ if($options->{tryGzip})
+ {
+ if(my $gunzipped = $this->_tryGzipInflate($handle))
+ {
+ $handle = new IO::Scalar \$gunzipped
+ }
+ }
+
+ my $data = $this->_getReadyHash($options);
+
+ my $linenum = 0;
+ my $lastfield = "";
+
+ foreach my $line (<$handle>)
+ {
+ #Sometimes with IO::Scalar, lines may have a newline at the end
+
+ #$line =~ s/\r??\n??$//; #CRLF fix, but chomp seems to clean it
+ chomp $line;
+
+
+ if($options->{stripComments}){
+ next if $line =~ /^\s*\#[^\#]/;
+ $line =~ s/\#$//;
+ $line =~ s/(?<=[^\#])\#[^\#].*//;
+ $line =~ s/\#\#/\#/;
+ }
+
+ $linenum++;
+ if($line =~ /^\S/)
+ {
+ #we have a valid key-value pair
+ if($line =~ /(.*?)\s*\:\s*(.*)$/)
+ {
+ my $key = $1;
+ my $value = $2;
+
+ if($options->{discardCase})
+ {
+ $key = lc($key);
+ }
+
+ unless($options->{verbMultiLine})
+ {
+ $value =~ s/[\s\t]+$//;
+ }
+
+ $data->{$key} = $value;
+
+
+ if ($options->{verbMultiLine}
+ && (($data->{$lastfield} || "") =~ /\n/o)){
+ $data->{$lastfield} .= "\n";
+ }
+
+ $lastfield = $key;
+ }else{
+ $this->_dowarn("Parse error on line $linenum of data; invalid key/value stanza");
+ return $structs;
+ }
+
+ }elsif($line =~ /^(\s+)(\S.*)/)
+ {
+ #appends to previous line
+
+ unless($lastfield)
+ {
+ $this->_dowarn("Parse error on line $linenum of data; indented entry without previous line");
+ return $structs;
+ }
+ if($options->{verbMultiLine}){
+ $data->{$lastfield}.="\n$1$2";
+ }elsif($2 eq "." ){
+ $data->{$lastfield}.="\n";
+ }else{
+ my $val = $2;
+ $val =~ s/\s+$//;
+ $data->{$lastfield}.="\n$val";
+ }
+
+ }elsif($line =~ /^\s*$/){
+ if ($options->{verbMultiLine}
+ && ($data->{$lastfield} =~ /\n/o)) {
+ $data->{$lastfield} .= "\n";
+ }
+ if(keys %$data > 0){
+ push @$structs, $data;
+ }
+ $data = $this->_getReadyHash($options);
+ $lastfield = "";
+ }else{
+ $this->_dowarn("Parse error on line $linenum of data; unidentified line structure");
+ return $structs;
+ }
+
+ }
+
+ if(keys %$data > 0)
+ {
+ push @$structs, $data;
+ }
+
+ return $structs;
+}
+
+sub _tryGzipInflate
+{
+ my ($this, $handle) = @_;
+
+ my $buffer;
+ {
+ local $/ = undef;
+ $buffer = <$handle>;
+ }
+ return Compress::Zlib::memGunzip($buffer) || $buffer;
+}
+
+sub _getReadyHash
+{
+ my ($this, $options) = @_;
+ my $data;
+
+ if($options->{useTieIxHash})
+ {
+ eval("use Tie::IxHash");
+ if($@)
+ {
+ $this->_dowarn("Can't use Tie::IxHash. You need to install it to have this functionality");
+ return;
+ }
+ tie(%$data, "Tie::IxHash");
+ return $data;
+ }
+
+ return {};
+}
+
+sub _dowarn
+{
+ my ($this, $warning) = @_;
+
+ if($this->{_verbose})
+ {
+ warn "DEBUG: $warning";
+ }
+
+ return;
+}
+
+
+1;
+
+__END__
+
+=head1 NAME
+
+Parse::DebControl - Easy OO parsing of debian control-like files
+
+=head1 SYNOPSIS
+
+ use Parse::DebControl;
+
+ $parser = new Parse::DebControl;
+
+ $data = $parser->parse_mem($control_data, $options);
+ $data = $parser->parse_file('./debian/control', $options);
+ $data = $parser->parse_web($url, $options);
+
+ $writer = new Parse::DebControl;
+
+ $string = $writer->write_mem($singlestanza);
+ $string = $writer->write_mem([$stanza1, $stanza2]);
+
+ $writer->write_file($filename, $singlestanza, $options);
+ $writer->write_file($filename, [$stanza1, $stanza2], $options);
+
+ $writer->write_file($handle, $singlestanza, $options);
+ $writer->write_file($handle, [$stanza1, $stanza2], $options);
+
+ $parser->DEBUG();
+
+=head1 DESCRIPTION
+
+ Parse::DebControl is an easy OO way to parse Debian control files and
+ other colon-separated key-value pairs. It's specifically designed
+ to handle the format used in Debian control files, template files, and
+ the cache files used by dpkg.
+
+ For basic format information see:
+ http://www.debian.org/doc/debian-policy/ch-controlfields.html#s-controlsyntax
+
+ This module does not apply any intelligence to the file content
+ (because many kinds of files use this format), but merely handles
+ the format itself. It can handle simple control files, or files hundreds
+ of lines long, efficiently and easily.
+
+=head2 Class Methods
+
+=over 4
+
+=item * C<new()>
+
+=item * C<new(I<$debug>)>
+
+Returns a new Parse::DebControl object. If a true parameter I<$debug> is
+passed in, it turns on debugging, similar to a call to C<DEBUG()> (see below).
+
+=back
+
+=over 4
+
+=item * C<parse_file($control_filename,I<$options>)>
+
+Takes a filename as a scalar and an optional hashref of options (see below).
+Will parse as much as it can, warning (if C<DEBUG>ing is turned on) on
+parsing errors.
+
+Returns a reference to an array of hashrefs containing the data in the control
+file, split up by stanza. Stanzas are delineated by blank lines, and multi-line
+fields are expressed as such post-parsing. Single periods are treated as
+special extra newline delimiters, per convention. Whitespace is also stripped
+off of lines so as to make it harder to make mistakes with hand-written conf
+files.
+
+The options hashref can take the following parameters. Setting a key to a true
+value enables the corresponding option.
+
+ useTieIxHash - Instead of an array of regular hashrefs, uses Tie::IxHash-
+ based hashrefs
+
+ discardCase - Lowercase all keys (values are left untouched)
+
+ stripComments - Remove all commented lines in standard #comment format.
+ Literal #'s are represented by ##. For instance
+
+ Hello there #this is a comment
+ Hello there, I like ##CCCCCC as a grey.
+
+ The first is a comment, the second is a literal "#".
+
+ verbMultiLine - Keep the description AS IS, and do not collapse leading
+ spaces or dots as newlines. This also keeps whitespace from being
+ stripped off the end of lines.
+
+ tryGzip - Attempt to expand the data chunk with gzip first. If the text is
+ already expanded (ie: plain text), parsing will continue normally.
+ This could optionally be turned on for all items in the future, but
+ it is off by default so we don't have to scrub over all the text for
+ performance reasons.
+
+=back
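+
+For example, a call that strips comments and lowercases the keys might look
+like this (just a usage sketch; the control file path is the one from the
+synopsis):
+
+ my $parser = new Parse::DebControl;
+ my $stanzas = $parser->parse_file('./debian/control',
+     { discardCase => 1, stripComments => 1 });
+ # keys come back lowercased because of discardCase
+ print scalar(@$stanzas), " stanza(s) parsed\n";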
+
+=over 4
+
+=item * C<parse_mem($control_data, I<$options>)>
+
+Similar to C<parse_file>, except takes data as a scalar. Returns the same
+array of hashrefs as C<parse_file>. The options hashref is the same as
+C<parse_file> as well; see above.
+
+=back
+
+=over 4
+
+=item * C<parse_web($url, I<$options>)>
+
+Similar to the other parse_* functions, this pulls down a control file from
+the web and attempts to parse it. For options and return values, see C<parse_file>,
+above.
+
+=back
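+
+As a rough sketch, fetching and parsing a (possibly gzipped) control-style file
+from the web could look like this, where $url points at something like apt's
+Packages.gz:
+
+ my $parser = new Parse::DebControl;
+ my $stanzas = $parser->parse_web($url, { tryGzip => 1 });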
+
+=over 4
+
+=item * C<write_file($filename, $data, I<$options>)>
+
+=item * C<write_file($handle, $data)>
+
+=item * C<write_file($filename, [$data1, $data2, $data3], I<$options>)>
+
+=item * C<write_file($handle, [$data, $data2, $data3])>
+
+This function takes a filename or a handle and writes the data out. The
+data can be given as a single hashref or as an arrayref of hashrefs. It
+will then write it out in a format that it can parse. The order is dependent
+on your hash sorting order. If you care, use Tie::IxHash. Remember that for
+reading back in, the module doesn't care about the order.
+
+The I<$options> hashref can contain any of the following items:
+
+ addNewline - At the end of the last stanza, add an additional newline.
+ appendFile - (default) Write to the end of the file
+ clobberFile - Overwrite the file given.
+ gzip - Compress the data with gzip before writing
+
+Since you determine the mode of your filehandle yourself, passing an options
+hashref along with it won't do anything; it is simply ignored.
+
+The I<addNewline> option solves a problem that arises when you write
+stanzas to a file in a loop (such as logging with this module): without
+it, the stanzas run together and won't parse back in correctly.
+It is possible that this is the behavior that you want (if you wanted to write
+one key at a time), so it is optional.
+
+This function returns the number of bytes written to the file, or undef
+on failure.
+
+=back
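+
+A minimal writing sketch (the output file name and the field values here are
+only examples) could be:
+
+ my $writer = new Parse::DebControl;
+ my $stanza = { 'Package' => 'foo', 'Priority' => 'optional' };
+ my $bytes = $writer->write_file('./foo.control', $stanza,
+     { clobberFile => 1, addNewline => 1 });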
+
+=over 4
+
+=item * C<write_mem($data)>
+
+=item * C<write_mem([$data1,$data2,$data3])>;
+
+This function works similarly to the C<write_file> method, except it returns
+the control structure as a scalar instead of writing it to a file. There
+is no I<%options> parameter for this method (yet).
+
+=back
+
+=over 4
+
+=item * C<DEBUG()>
+
+Turns on debugging. Calling it with no parameter or a true parameter turns
+on verbose C<warn()>ings. Calling it with a false parameter turns it off.
+It is useful for nailing down any format or internal problems.
+
+=back
+
+=head1 CHANGES
+
+B<Version 2.005> - January 13th, 2004
+
+=over 4
+
+=item * More generic test suite fix for earlier versions of Test::More
+
+=item * Updated copyright statement
+
+=back
+
+B<Version 2.004> - January 12th, 2004
+
+=over 4
+
+=item * More documentation formatting and typo fixes
+
+=item * CHANGES file now generated automatically
+
+=item * Fixes for potential test suite failure in Pod::Coverage run
+
+=item * Adds the "addNewline" option to write_file to solve the streaming stanza problem.
+
+=item * Adds tests for the addNewline option
+
+=back
+
+B<Version 2.003> - January 6th, 2004
+
+=over 4
+
+=item * Added optional Test::Pod test
+
+=item * Skips potential Win32 test failure in the module where it wants to write to /tmp.
+
+=item * Added optional Pod::Coverage test
+
+=back
+
+B<Version 2.002> - October 7th, 2003
+
+=over 4
+
+=item * No code changes. Fixes to test suite
+
+=back
+
+B<Version 2.001> - September 11th, 2003
+
+=over 4
+
+=item * Cleaned up more POD errors
+
+=item * Added tests for file writing
+
+=item * Fixed bug where write_file ignored the gzip parameter
+
+=back
+
+B<Version 2.0> - September 5th, 2003
+
+=over 4
+
+=item * Version increase.
+
+=item * Added gzip support (with the tryGzip option), so that compressed control files can be parsed on the fly
+
+=item * Added gzip support for writing of control files
+
+=item * Added parse_web to snag files right off the web. Useful for things such as apt's Sources.gz and Packages.gz
+
+=back
+
+B<Version 1.10b> - September 2nd, 2003
+
+=over 4
+
+=item * Documentation fix for ## vs # in stripComments
+
+=back
+
+B<Version 1.10> - September 2nd, 2003
+
+=over 4
+
+=item * Documentation fixes, as pointed out by pudge
+
+=item * Adds a feature to stripComments where ## will get interpolated as a literal pound sign, as suggested by pudge.
+
+=back
+
+B<Version 1.9> - July 24th, 2003
+
+=over 4
+
+=item * Fix for warning for edge case (uninitialized value in chomp)
+
+=item * Tests for CRLF
+
+=back
+
+B<Version 1.8> - July 11th, 2003
+
+=over 4
+
+=item * By default, we now strip off whitespace unless verbMultiLine is in place. This makes sense for things like conf files where trailing whitespace has no meaning. Thanks to pudge for reporting this.
+
+=back
+
+B<Version 1.7> - June 25th, 2003
+
+=over 4
+
+=item * POD documentation error noticed again by Frank Lichtenheld
+
+=item * Also by Frank, applied a patch to add a "verbMultiLine" option so that we can hand multiline fields back unparsed.
+
+=item * Slightly expanded test suite to cover new features
+
+=back
+
+B<Version 1.6.1> - June 9th, 2003
+
+=over 4
+
+=item * POD cleanups noticed by Frank Lichtenheld. Thank you, Frank.
+
+=back
+
+B<Version 1.6> - June 2nd, 2003
+
+=over 4
+
+=item * Cleaned up some warnings when you pass in empty hashrefs or arrayrefs
+
+=item * Added stripComments setting
+
+=item * Cleaned up POD errors
+
+=back
+
+B<Version 1.5> - May 8th, 2003
+
+=over 4
+
+=item * Added a line to quash errors with undef hashkeys and writing
+
+=item * Fixed the Makefile.PL to straighten up DebControl.pm being in the wrong dir
+
+=back
+
+B<Version 1.4> - April 30th, 2003
+
+=over 4
+
+=item * Removed exports as they were unnecessary. Many thanks to pudge, who pointed this out.
+
+=back
+
+B<Version 1.3> - April 28th, 2003
+
+=over 4
+
+=item * Fixed a bug where writing blank stanzas would throw a warning. Fix found and supplied by Nate Oostendorp.
+
+=back
+
+B<Version 1.2b> - April 25th, 2003
+
+Fixed:
+
+=over 4
+
+=item * A bug in the test suite where IxHash was not disabled in 40write.t. Thanks to Jeroen Latour from cpan-testers for the report.
+
+=back
+
+B<Version 1.2> - April 24th, 2003
+
+Fixed:
+
+=over 4
+
+=item * A bug in IxHash support where multiple stanzas might be out of order
+
+=back
+
+B<Version 1.1> - April 23rd, 2003
+
+Added:
+
+=over 4
+
+=item * Writing support
+
+=item * Tie::IxHash support
+
+=item * Case insensitive reading support
+
+=back
+
+B<Version 1.0> - April 23rd, 2003
+
+=over 4
+
+=item * This is the initial public release for CPAN, so everything is new.
+
+=back
+
+=head1 BUGS
+
+The module will let you parse otherwise illegal key-value pairs and pairs with spaces. Badly formed stanzas will do things like overwrite duplicate keys, etc. This is your problem.
+
+As of 1.10, the module uses advanced regexps to handle comments. If the tests fail, then stripComments won't work on your (earlier) perl version; it should be fine on 5.6.0 and later.
+
+=head1 TODO
+
+Change the name over to the Debian:: namespace, probably as Debian::ControlFormat. This will happen as soon as the project that uses this module reaches stability, and we can do some minor tweaks.
+
+=head1 COPYRIGHT
+
+Parse::DebControl is copyright 2003,2004 Jay Bonci E<lt>jaybonci at cpan.orgE<gt>.
+This program is free software; you can redistribute it and/or modify it under
+the same terms as Perl itself.
+
+=cut
Added: trunk/community/qa/README
===================================================================
--- trunk/community/qa/README (rev 0)
+++ trunk/community/qa/README 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,145 @@
+# TODO: add some intro about commoncheck, maintainercheck, packagecheck,
+# wnppcheck
+
+About the new DebianQA scripts
+==============================
+
+There are a number of Perl modules under the DebianQA namespace; some of them
+provide common functionality, and the others are responsible for collecting
+data from different sources.
+
+Data collection is completely separated from presentation. There are a couple
+of very simple scripts that provide the latter: qareport and qareport.cgi. Both
+give more or less the same information; the former prints to stdout, while the
+latter is meant to be used as a CGI script.
+
+The script that controls data collection is fetchdata, which is meant to be run
+from a cronjob and/or post-commit hook.
+
+You can ask for basic help with the --help option (not in the cgi version).
+
+All the scripts read from the same configuration file, which you specify with
+the --conf option, or with the DEBIAN_QA_CONF environment variable. The CGI
+script doesn't have the --conf option, obviously.
+
+For a sample configuration file, see the debianqa.conf-sample file. It is
+mostly self-explanatory. Don't forget to set a suitable cache_dir that other
+members of your group can write to, and to use an absolute path for the
+template dir.
+
+Cheat sheet for usual svn layouts:
+
+Layout 1 (python-modules example):
+----------------------------------
+
+For a structure like:
+
+svn://svn.debian.org/svn/python-modules/packages/<package>/trunk/
+
+You should use:
+
+[qareport_cgi]
+wsvn_url = http://svn.debian.org/wsvn/python-modules/packages/%s/trunk
+
+[svn]
+repository = svn://svn.debian.org/svn/python-modules/
+packages_path = packages
+post_path = trunk
+
+
+Layout 2 (pkg-perl example):
+----------------------------
+
+For a structure like:
+
+svn://svn.debian.org/svn/pkg-perl/trunk/<package>/debian/
+
+You should use:
+
+[qareport_cgi]
+wsvn_url = http://svn.debian.org/wsvn/pkg-perl/trunk/%s
+
+[svn]
+repository = svn://svn.debian.org/svn/pkg-perl/
+packages_path = trunk
+post_path = /
+
+
+First run
+=========
+
+After configuring, run the initial download; it can take a long time:
+
+$ <path>/fetchdata --conf <pathtoconf> [-v[v..]] [-j]
+
+-v increases verbosity, and -j enables working in parallel (three processes).
+
+After that, it will use the cached data if it's not stale, or it will download
+what's necessary. You can also specify package directories to avoid updating
+the whole database.
+
+Package status in the command line
+==================================
+
+With qareport you can see the packages' status in your shell; you can get the
+full listing or limit it to the specified packages:
+
+$ <path>/qareport --conf <pathtoconf> [<package> [<package> ... ]]
+
+asterisk:
+ - Version status: Watchfile problem
+ + Watch status: DownloadError
+ + SVN: 1:1.4.13~dfsg-1 (mangled: 1.4.13) (unreleased: 1:1.4.13~dfsg-2) Archive: 1:1.4.13~dfsg-1 (unstable) Upstream: Unknown (mangled: Unknown)
+ + Bugs: #396499, #448171, #433779, #337209, #386114, #399807, #399970, #449706, #381786, #438702, #293751, #353227
+(...)
+
+Using the CGI script
+====================
+
+Copy or symlink qareport.cgi to your project's cgi-bin directory, and copy the
+htaccess file (renaming it to .htaccess). In it you should configure the paths
+to the libraries and to the configuration file.
+
+Once that is done, you will be able to see a nice XHTML version of the status
+report. You can write your own templates and switch between them with a GET
+parameter: http://..../cgi-bin/qareport.cgi?template=my_nice_template
+
+Setting a post-commit hook
+==========================
+
+If you want to have the information updated the moment you commit a change, you
+can add these lines to your post-commit hook:
+
+REPOS="$1"
+REV="$2"
+
+[...]
+
+umask 002
+BASE=<path_to_your_local_copy>
+PERL5LIB=$BASE $BASE/fetchdata \
+ -c <path_to_conf>/qa.conf -r "$REV"
+
+The -r switch enables post-commit mode: it only checks the changes in the
+repository, and then verifies whether it needs to update upstream information
+for the modified packages.
+
+
+Setting a cron job
+==================
+
+All the data you downloaded in the first run gets stale after some time, so you
+need to run a full check periodically to refresh whatever is old (the time to
+live of each data source is controlled from the configuration file). The best
+thing to do is to set up a periodic cron job (every hour or two is a good
+period; it won't waste bandwidth if the data is still current):
+
+$ crontab -l
+# m h dom mon dow command
+
+BINDIR=<path_to_your_local_copy>
+PERL5LIB=<path_to_your_local_copy>
+
+0 * * * * $BINDIR/fetchdata -c <path_to_conf>/qa.conf
+
+$Id: README 13821 2008-01-29 06:47:39Z tincho-guest $
Added: trunk/community/qa/commoncheck
===================================================================
--- trunk/community/qa/commoncheck (rev 0)
+++ trunk/community/qa/commoncheck 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,27 @@
+# defaults
+REPO=svn://svn.debian.org/svn/pkg-perl
+MIRROR=ftp://ftp.debian.org
+CPAN=ftp://cpan.org/pub/CPAN
+
+# special hosts
+HOST=$(hostname)
+case $HOST in
+ belanna|nerys)
+ MIRROR=ftp://ftp.at.debian.org
+ CPAN=ftp://gd.tuwien.ac.at/pub/CPAN
+ ;;
+ alioth)
+ REPO=file:///svn/pkg-perl
+ MIRROR=ftp://ftp.nl.debian.org
+ CPAN=ftp://cpan.wanadoo.nl/pub/CPAN
+ ;;
+esac
+
+# This mirror is near alioth. From #alioth:
+# <ard> ard at c32791:~$ sudo /usr/sbin/traceroute -A cpan.wanadoo.nl|grep AS1200
+# <ard> traceroute to ftp.wanadoo.nl (194.134.17.10), 64 hops max, 40 byte packets
+# <ard> 5 ams-ix.euro.net (195.69.144.70) [AS1200] 1 ms 1 ms 1 ms
+# <ard> jups
+# <ard> 10G going to as1200
+# <ard> As long as it passes as1200 it's ok... Everything else is $$ :-(
+# CPAN=ftp://cpan.wanadoo.nl/pub/CPAN
Added: trunk/community/qa/debian-med.conf
===================================================================
--- trunk/community/qa/debian-med.conf (rev 0)
+++ trunk/community/qa/debian-med.conf 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,53 @@
+; vim:syntax=dosini
+;
+; Sample config for DebianQA scripts
+;
+; A "~/" appearing at the beginning of a string will be replaced with the
+; user's home directory
+[qareport_cgi]
+templates_path = templates
+default_template = by_category
+group_name = Debian-Med
+group_url = http://debian-med.alioth.debian.org/
+; sprintf format for the package wsvn location, takes one parameter, the
+; package directory.
+wsvn_url = http://svn.debian.org/wsvn/debian-med/trunk/packages/%s/trunk/?rev=0&sc=0
+
+[svn]
+repository = svn://svn.debian.org/svn/debian-med/trunk
+packages_path = packages
+; path after the package name, should be the parent of the "debian/" directory
+post_path = trunk
+
+[archive]
+mirror = ftp://ftp.debian.org/debian
+suites = unstable, testing, stable, oldstable, experimental
+sections = main, contrib, non-free
+suites_ttl = 360, 360, 10080, 10080, 360
+new_url = http://ftp-master.debian.org/new.html
+new_ttl = 60
+incoming_url = http://incoming.debian.org
+incoming_ttl = 60
+
+#[watch] # Not implemented yet
+#ttl = 360 # 6 hours
+##use_cpan = 1
+#cpan_mirror = ftp://cpan.org
+#cpan_ttl = 360 # 6 hours
+
+[bts]
+ttl = 360 # 6 hours
+soap_proxy = http://bugs.debian.org/cgi-bin/soap.cgi
+soap_uri = Debbugs/SOAP
+; wontfix, pending, etch, sarge, etc
+ignore_keywords =
+; wishlist, minor
+ignore_severities =
+
+; Parameters before any section header go into the [common] section
+[common]
+cache_dir = ~/.debianqa
+; verbosity level: error => 0, warn => 1, info => 2, debug => 3
+verbose = 2
+; Prepend syslog-style format?
+formatted_log = 1
Added: trunk/community/qa/debianqa.conf-sample
===================================================================
--- trunk/community/qa/debianqa.conf-sample (rev 0)
+++ trunk/community/qa/debianqa.conf-sample 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,53 @@
+; vim:syntax=dosini
+;
+; Sample config for DebianQA scripts
+;
+; A "~/" appearing at the beginning of a string will be replaced with the
+; user's home directory
+[qareport_cgi]
+templates_path = templates
+default_template = by_category
+group_name = Debian Foo Group
+group_url = http://pkg-foo.alioth.debian.org/
+; sprintf format for the package wsvn location, takes one parameter, the
+; package directory.
+;wsvn_url = http://svn.debian.org/wsvn/pkg-perl/trunk/%s
+
+[svn]
+repository = svn://svn.debian.org/svn/pkg-foo
+packages_path = trunk
+; path after the package name, should be the parent of the "debian/" directory
+; post_path = trunk
+
+[archive]
+mirror = ftp://ftp.debian.org/debian
+suites = unstable, testing, stable, oldstable, experimental
+sections = main, contrib, non-free
+suites_ttl = 360, 360, 10080, 10080, 360
+new_url = http://ftp-master.debian.org/new.html
+new_ttl = 60
+incoming_url = http://incoming.debian.org
+incoming_ttl = 60
+
+[watch] # Not implemented yet
+ttl = 360 # 6 hours
+use_cpan = 1
+cpan_mirror = ftp://cpan.org
+cpan_ttl = 360 # 6 hours
+
+[bts]
+ttl = 360 # 6 hours
+soap_proxy = http://bugs.debian.org/cgi-bin/soap.cgi
+soap_uri = Debbugs/SOAP
+; wontfix, pending, etch, sarge, etc
+ignore_keywords =
+; wishlist, minor
+ignore_severities =
+
+; Parameters before any section header go into the [common] section
+[common]
+cache_dir = ~/.debianqa
+; verbosity level: error => 0, warn => 1, info => 2, debug => 3
+verbose = 1
+; Prepend syslog-style format?
+formatted_log = 1
Added: trunk/community/qa/fetchdata
===================================================================
--- trunk/community/qa/fetchdata (rev 0)
+++ trunk/community/qa/fetchdata 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,124 @@
+#!/usr/bin/perl
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: fetchdata 9026 2007-11-08 12:09:53Z tincho-guest $
+#
+# Program for invoking the different data-fetching routines. To use from a
+# cronjob, interactively or on post-commit hooks.
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+use strict;
+use warnings;
+
+use DebianQA::Archive;
+use DebianQA::BTS;
+use DebianQA::Common;
+use DebianQA::Config;
+use DebianQA::Svn;
+use DebianQA::Watch;
+use Getopt::Long;
+
+my $opts = getopt_common(1, 1);
+
+my $p = new Getopt::Long::Parser;
+$p->configure(qw(no_ignore_case bundling));
+
+my $list_is_packages = 0;
+my $svn_rev;
+my $parallel = 0;
+my $only;
+$p->getoptions('help|h|?' => \&help, 'packages!' => \$list_is_packages,
+ 'svn-revision|r=i' => \$svn_rev, 'parallel|j!' => \$parallel,
+ 'only=s' => \$only) or die "Error parsing command-line arguments!\n";
+
+die "Invalid module $only" if($only and $only !~ /^(svn|watch|bts|archive)$/);
+if($svn_rev) {
+ info("Enabling post-commit mode");
+ $only = "svn";
+ $opts->{force} = 0;
+}
+my @dirs = @ARGV;
+my @pkgs;
+
+if($list_is_packages) {
+ @pkgs = @dirs;
+ foreach(0..$#pkgs) {
+ $dirs[$_] = pkgsname2svndir($pkgs[$_]) || $pkgs[$_]; # Fallback
+ }
+} else {
+ foreach(0..$#dirs) {
+ $pkgs[$_] = svndir2pkgname($dirs[$_]) || $dirs[$_]; # Fallback
+ }
+}
+# We need this first
+my @changed_pkgs = svn_download($opts->{force}, $svn_rev, @dirs) if(
+ !$only or $only eq "svn");
+
+# returns dirs, not packages
+foreach(0..$#changed_pkgs) {
+ $changed_pkgs[$_] = svndir2pkgname($changed_pkgs[$_]) || $changed_pkgs[$_];
+}
+info(scalar @changed_pkgs, " changed packages in svn");
+debug("Changed packages in svn: ", join(", ", @changed_pkgs));
+
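+# In parallel mode, fork three children; each child inherits a different-sized
+# @pids list, which selects its task below (watch, BTS or archive download),
+# while the parent simply waits for all of them to finish.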
+if($parallel) {
+ local $SIG{CHLD} = "IGNORE";
+ my @pids;
+ my $pid;
+ foreach(0..2) {
+ unless(defined($pid = fork())) {
+ die "Can't fork: $!";
+ }
+ last unless($pid);
+ push @pids, $pid;
+ }
+ if(@pids == 2) {
+ deb_download($opts->{force}) if(!$only or $only eq "archive"); exit 0;
+ } elsif(@pids == 1) {
+ bts_download($opts->{force}, @pkgs) if(!$only or $only eq "bts");
+ exit 0;
+ } elsif(@pids == 0) {
+ if($svn_rev and @changed_pkgs) { # post-commit mode
+ watch_download($opts->{force}, @changed_pkgs);
+ } else {
+ watch_download($opts->{force}, @pkgs) if(!$only
+ or $only eq "watch");
+ }
+ exit 0;
+ } else {
+ waitpid($_, 0) foreach(@pids);
+ }
+} else {
+ deb_download($opts->{force}) if(!$only or $only eq "archive");
+ bts_download($opts->{force}, @pkgs) if(!$only or $only eq "bts");
+ if($svn_rev and @changed_pkgs) { # post-commit mode
+ watch_download($opts->{force}, @changed_pkgs);
+ } else {
+ watch_download($opts->{force}, @pkgs) if(!$only or $only eq "watch");
+ }
+}
+
+sub help {
+ print <<END;
+Usage:
+ $0 [options] [dirname [dirname ...]]
+
+ For each named directory, updates the databases with information retrieved
+ from the Debian archive, BTS, watchfiles and the Subversion repository.
+
+Options:
+ --help, -h This help.
+ --conf, -c FILE Specifies a configuration file, uses defaults if not
+ present.
+ --force, -f Force operation: ignore caches.
+ --packages Treat the parameters as source package names, instead of
+ directories.
+ --svn-revision,
+ -r REV Current revision for scanning the Subversion repository,
+ only scans svn and watch files changed (post-commit mode).
+ --parallel, -j Process in parallel (it will fork three processes).
+ --only MODULE Only run update for MODULE (svn|archive|watch|bts).
+
+END
+ exit 0;
+}
Property changes on: trunk/community/qa/fetchdata
___________________________________________________________________
Name: svn:executable
+ *
Added: trunk/community/qa/htaccess
===================================================================
--- trunk/community/qa/htaccess (rev 0)
+++ trunk/community/qa/htaccess 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,2 @@
+SetEnv PERL5LIB ../scripts/qa
+SetEnv DEBIAN_QA_CONF ../.debianqa/qa.conf
Added: trunk/community/qa/maintainercheck
===================================================================
--- trunk/community/qa/maintainercheck (rev 0)
+++ trunk/community/qa/maintainercheck 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2007, 2008
+# Released under the terms of the GNU GPL 2
+
+. $(dirname $0)/commoncheck
+
+DIR=$(mktemp -d)
+
+for P in $(svn ls $REPO/trunk/); do
+ P=${P%/}
+ CONTROL="$DIR/$P.control"
+ svn export $REPO/trunk/$P/debian/control $CONTROL > /dev/null
+ if ! grep "Maintainer: Debian Perl Group <pkg-perl-maintainers at lists\.alioth\.debian\.org>" $CONTROL > /dev/null ; then
+ echo $P
+ egrep "(Maintainer|Uploaders)" $CONTROL | sort
+ echo
+ fi
+ rm $CONTROL
+done
+
+rm -rf $DIR
Property changes on: trunk/community/qa/maintainercheck
___________________________________________________________________
Name: svn:executable
+ *
Added: trunk/community/qa/oldscripts/Common.pm
===================================================================
--- trunk/community/qa/oldscripts/Common.pm (rev 0)
+++ trunk/community/qa/oldscripts/Common.pm 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,106 @@
+# $Id: Common.pm 8974 2007-11-07 15:28:29Z gregoa-guest $
+package Common;
+use strict;
+use Sys::Hostname;
+use base 'Exporter';
+
+our @EXPORT = qw(
+ $SVN_REPO
+ $MIRROR
+ $CPAN_MIRROR
+);
+
+our $SVN_REPO = "svn://svn.debian.org/svn/pkg-perl";
+our $MIRROR = "MIRROR=ftp://ftp.debian.org";
+our $CPAN_MIRROR = "ftp://cpan.org/pub/CPAN";
+
+# special hosts
+for( hostname )
+{
+ # alioth
+ /alioth/ && do {
+ $SVN_REPO = "file:///svn/pkg-perl";
+ $MIRROR = "ftp://ftp.nl.debian.org";
+ $CPAN_MIRROR = "ftp://cpan.wanadoo.nl/pub/CPAN";
+ last;
+ };
+
+ # Gregor
+ /belanna|nerys/ && do {
+ $MIRROR = "ftp://ftp.at.debian.org";
+ $CPAN_MIRROR = "ftp://gd.tuwien.ac.at/pub/CPAN";
+ last;
+ };
+
+ # dam
+ /pc1/ && do {
+ $MIRROR = "http://proxy:9999";
+ $CPAN_MIRROR = "ftp://ftp.uni-sofia.bg/cpan";
+ last;
+ };
+ /beetle/ && do {
+ $MIRROR = "http://localhost:9999";
+ $CPAN_MIRROR = "ftp://ftp.uni-sofia.bg/cpan";
+ last;
+ };
+
+ # Tincho
+ /abraxas/ && do {
+ $MIRROR = "file:///media/IOMega/mirror/";
+ $CPAN_MIRROR = "ftp://cpan.ip.pt/pub/cpan/";
+ last;
+ };
+
+ die "Unknown host $_";
+}
+
+# This mirror is near alioth. From #alioth:
+# <ard> ard at c32791:~$ sudo /usr/sbin/traceroute -A cpan.wanadoo.nl|grep AS1200
+# <ard> traceroute to ftp.wanadoo.nl (194.134.17.10), 64 hops max, 40 byte packets
+# <ard> 5 ams-ix.euro.net (195.69.144.70) [AS1200] 1 ms 1 ms 1 ms
+# <ard> jups
+# <ard> 10G going to as1200
+# <ard> As long as it passes as1200 it's ok... Everything else is $$ :-(
+# CPAN=ftp://cpan.wanadoo.nl/pub/CPAN
+
+use CPAN;
+my $home = $ENV{HOME};
+$CPAN::Config = {
+ 'build_cache' => q[10],
+ 'build_dir' => "$home/.cpan/build",
+ 'cache_metadata' => q[1],
+ 'cpan_home' => "$home/.cpan",
+ 'cpan_version_check' => q[0],
+ 'dontload_hash' => { },
+ 'ftp' => q[],
+ 'ftp_proxy' => q[],
+ 'getcwd' => q[cwd],
+ 'gpg' => q[/usr/bin/gpg],
+ 'gzip' => q[/bin/gzip],
+ 'histfile' => "/dev/null",
+ 'histsize' => q[100],
+ 'http_proxy' => q[],
+ 'inactivity_timeout' => q[0],
+ 'index_expire' => q[1],
+ 'inhibit_startup_message' => q[1],
+ 'keep_source_where' => "$home/.cpan/sources",
+ 'lynx' => q[/usr/bin/lynx],
+ 'make' => q[/usr/bin/make],
+ 'make_arg' => q[],
+ 'make_install_arg' => q[],
+ 'makepl_arg' => q[INSTALLDIRS=site],
+ 'ncftp' => q[],
+ 'ncftpget' => q[],
+ 'no_proxy' => q[],
+ 'pager' => q[/usr/bin/less],
+ 'prerequisites_policy' => q[ignore],
+ 'scan_cache' => q[never],
+ 'shell' => q[/bin/bash],
+ 'tar' => q[/bin/tar],
+ 'term_is_latin' => q[0],
+ 'unzip' => q[],
+ 'urllist' => [ $CPAN_MIRROR ],
+ 'wget' => q[/usr/bin/wget],
+};
+
+1;
Added: trunk/community/qa/oldscripts/versioncheck
===================================================================
--- trunk/community/qa/oldscripts/versioncheck (rev 0)
+++ trunk/community/qa/oldscripts/versioncheck 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,39 @@
+#!/bin/sh
+
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2007
+# Released under the terms of the GNU GPL 2
+
+. $(dirname $0)/commoncheck
+
+# get basic info
+DIR=$(mktemp -d)
+echo > $DIR/Packages
+for BRANCH in main contrib non-free; do
+ wget -q -O- $MIRROR/debian/dists/unstable/$BRANCH/source/Sources.gz | gzip -d | egrep "^(Package|Version)" >> $DIR/Packages
+done
+
+wget -q -O- http://incoming.debian.org | html2text -nobs -width 255 > $DIR/incoming
+wget -q -O- http://ftp-master.debian.org/new.html | html2text -nobs -width 255 > $DIR/newqueue
+
+# loop over packages
+for P in $(svn ls $REPO/trunk/); do
+ P=$(echo $P | sed -e 's;/;;')
+
+ PA=$(grep -A 1 $P $DIR/Packages | grep Version | perl -pe 's/Version: //')
+ RE=$(svn cat $REPO/trunk/${P}/debian/changelog | grep -m 1 "$P.*unstable;" | perl -pe 's/.*\((.+)\).*/$1/')
+ IN=$(grep "$P.*\.dsc" $DIR/incoming | perl -pe 's/.*_(.+)\.dsc.*/\1/')
+ NE=$(grep $P $DIR/newqueue | perl -pe "s/.*$P\s+([^\s]+)\s+source.*/\1/g")
+
+ if [ "$RE" != "$PA" -a "$RE" != "$IN" -a "$RE" != "$NE" ]; then
+ echo "$P ==>"
+ echo -e "\trepository: $RE"
+ echo -e "\tpackages: $PA"
+ [ -n "$IN" ] && echo -e "\tincoming: $IN"
+ [ -n "$NE" ] && echo -e "\tnew: $NE"
+ fi
+
+done
+
+rm -rf $DIR
+
+exit 0
Property changes on: trunk/community/qa/oldscripts/versioncheck
___________________________________________________________________
Name: svn:executable
+ *
Added: trunk/community/qa/oldscripts/versioncheck-html
===================================================================
--- trunk/community/qa/oldscripts/versioncheck-html (rev 0)
+++ trunk/community/qa/oldscripts/versioncheck-html 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,153 @@
+#!/bin/bash
+
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Released under the terms of the GNU GPL 2
+
+THIS_REVISION='$Id: versioncheck-html 8974 2007-11-07 15:28:29Z gregoa-guest $'
+
+. $(dirname $0)/commoncheck
+
+set -u
+
+# get basic info
+DIR=$(mktemp -d)
+echo > $DIR/Packages
+for BRANCH in main contrib non-free; do
+ wget -q -O- $MIRROR/debian/dists/unstable/$BRANCH/source/Sources.gz | gzip -d | egrep "^(Package|Version)" >> $DIR/Packages
+done
+
+wget -q -O- http://incoming.debian.org | html2text -nobs -width 255 > $DIR/incoming
+wget -q -O- http://ftp-master.debian.org/new.html | html2text -nobs -width 255 > $DIR/newqueue
+wget -q -O- $CPAN/modules/01modules.index.html | html2text -nobs -width 500 > $DIR/cpan
+
+cat <<_EOF
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+ "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+ <title>pkg-perl TODO</title>
+ <style type="text/css">
+ body {
+ background: white;
+ color: black;
+ }
+ table {
+ border: 1px solid black;
+ border-collapse: collapse;
+ }
+ td, th {
+ border: 1px solid black;
+ }
+ .upload {
+ background: lightsalmon;
+ }
+ .upgrade {
+ background: lightblue;
+ }
+ </style>
+</head>
+<body>
+<table>
+<tr><th>Legend</th></tr>
+<tr><td class="upload">Needs uploading</td></tr>
+<tr><td class="upgrade">Needs upgrade from upstream</td></tr>
+</table>
+
+<br>
+
+<table>
+<tr>
+ <th>Package</th>
+ <th>Repository</th>
+ <th>Archive</th>
+ <th>upstream</th>
+</tr>
+_EOF
+
+TOTAL=0
+# loop over packages
+for P in $(svn ls $REPO/trunk/); do
+ P=$(echo $P | sed -e 's;/;;')
+
+ # Get changelog from repo and version + last uploader
+ cat /dev/null > $DIR/changelog
+ svn cat $REPO/trunk/${P}/debian/changelog > $DIR/changelog
+ RE=$(grep -m 1 "$P.*unstable;" $DIR/changelog | perl -pe 's/.*\((.+)\).*/$1/')
+ UL=$(grep -m 1 "^ --" $DIR/changelog | perl -pe 's/^ -- (.+>) (.*)/$1 ($2)/')
+
+ # Get versions in Packages, incoming and NEW
+ PA=$(grep -A 1 $P $DIR/Packages | grep Version | perl -pe 's/Version: //')
+ IN=$(grep "$P.*\.dsc" $DIR/incoming | perl -pe 's/.*_(.+)\.dsc.*/\1/')
+ NE=$(grep $P $DIR/newqueue | perl -pe "s/.*$P\s+([^\s]+)\s+source.*/\1/g")
+
+ # Get the watch file, mangling CPAN URLs to use
+ # our fast mirror
+ cat /dev/null > $DIR/watch
+ svn cat $REPO/trunk/${P}/debian/watch \
+ | sed -e "s!^http://www.cpan.org/!$CPAN/!" \
+ | sed -e "s!^ftp://www.cpan.org/!$CPAN/!" \
+ | sed -e "s!^http://backpan.perl.org/authors/!$CPAN/modules/by-author/!" \
+ | sed -e "s!^http://mirrors.kernel.org/cpan/!$CPAN/!" \
+ | sed -e "s!^ftp://mirrors.kernel.org/cpan/!$CPAN/!" \
+ > $DIR/watch
+ UPCUR=$(echo $RE | perl -pe 's/^(?:\d:)?(.+?)(?:-[^-]+)?$/\1/')
+ UPNEW=""
+ if [ -s $DIR/watch ]; then
+ if egrep -qi '^(ftp|http).+cpan' $DIR/watch; then
+ VER_REGEX=$(egrep '^http|^ftp' $DIR/watch | perl -pe 's{.+/\s*}{}; s/\s.*$//')
+ if [ -n "$VER_REGEX" ]; then
+ UPNEW=$(perl -ne "if(/\\s${VER_REGEX}\\s/){ \$last_ver = \$1 if \$1 > \$last_ver; } END { print \$last_ver } " $DIR/cpan)
+ else
+ UPNEW="Invalid debian/watch"
+ fi
+ fi
+
+ # Either unknown watch URL or a module without
+ # meta-information; fall back to uscan
+ if [ -z "$UPNEW" ]; then
+ UPNEW=$(uscan --watchfile $DIR/watch --upstream-version $UPCUR --package $P --report-status | perl -ne 'print if s/Newest version on remote site is (.+),.*/\1/')
+ fi
+ else
+ if echo "$RE" | egrep -q -- '-.+$'; then
+ UPNEW="no debian/watch"
+ else
+ # native package
+ UPNEW=$RE
+ fi
+ fi
+
+ if [ "$UPCUR" != "$UPNEW" -o "$RE" != "$PA" -a "$RE" != "$IN" -a "$RE" != "$NE" ]; then
+ echo "<tr>"
+ echo "<td><a href=\"http://packages.qa.debian.org/$P\">$P</a> <span style=\"font-size: smaller\">[<a href=\"http://bugs.debian.org/src:$P\">BTS</a>]</span></td>"
+ echo -n "<td title='$UL'"
+ if [ "$RE" != "$PA" ] ; then
+ echo -n " class='upload'"
+ fi
+ echo ">${RE:--}</td>"
+ echo "<td>${PA:--}"
+ [ -n "$IN" ] && echo "Incoming: $IN"
+ [ -n "$NE" ] && echo "NEW: $NE"
+ echo "</td>"
+ if [ "$UPCUR" != "$UPNEW" ] ; then
+ echo "<td class='upgrade'>${UPNEW:-No upstream sources!?}</td>"
+ else
+ echo "<td> </td>"
+ fi
+ echo "</tr>"
+
+ TOTAL=$(( $TOTAL + 1 ))
+ fi
+
+done
+
+echo "<tr><td colspan=\"4\"><b>TOTAL: $TOTAL</b></td></tr>"
+echo "</table>"
+echo "<hr>"
+LANG=C date
+echo "<br><i>$THIS_REVISION</i>"
+echo "</body>"
+
+rm -rf $DIR
+
+exit 0
Property changes on: trunk/community/qa/oldscripts/versioncheck-html
___________________________________________________________________
Name: svn:executable
+ *
Added: trunk/community/qa/oldscripts/versioncheck.pl
===================================================================
--- trunk/community/qa/oldscripts/versioncheck.pl (rev 0)
+++ trunk/community/qa/oldscripts/versioncheck.pl 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,821 @@
+#!/usr/bin/perl -w
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Released under the terms of the GNU GPL 2
+
+### TODO ###
+#
+# Try harder to use 02packages.details.gz for authoritative CPAN
+# version source, regardless of whether debian/watch uses by-module URL
+# or by-author one
+#
+# Use AptPkg::Version for
+# - version comparison
+# - stripping debian revision off from a version
+
+our $THIS_REVISION = '$Id: versioncheck.pl 8974 2007-11-07 15:28:29Z gregoa-guest $';
+
+BEGIN {
+ my $self_dir = $0;
+ $self_dir =~ s{/[^/]+$}{};
+ unshift @INC, $self_dir;
+};
+
+use strict;
+use Common;
+use LWP::Simple ();
+use Compress::Zlib ();
+use HTML::TableExtract;
+use SVN::Client;
+use SVN::Core;
+use IO::Scalar;
+use Parse::DebianChangelog;
+use Getopt::Long;
+
+our $opt_debug = 0;
+
+GetOptions(
+ 'debug!' => \$opt_debug,
+);
+
+sub debugmsg(@)
+{
+ warn @_ if $opt_debug;
+};
+
+
+# Get some information globally
+
+use Storable();
+use LWP::UserAgent;
+
+debugmsg( "CPAN mirror is $CPAN_MIRROR\n" );
+debugmsg( "HOME=$ENV{HOME}\n" );
+
+sub from_cache($$$)
+{
+ my( $ref, $name, $max_age) = @_;
+
+ my $dir = $ENV{HOME}.'/.dpg/versioncheck';
+
+ return undef unless -f "$dir/$name" and -M(_) <= $max_age/24;
+
+ my $data = Storable::retrieve("$dir/$name");
+ return undef unless $data;
+
+ debugmsg("$name loaded from cache (".scalar(keys(%$data)).")\n");
+
+ %$ref = %$data;
+ return 1;
+}
+
+sub to_cache($$)
+{
+ my( $ref, $name) = @_;
+
+ my $home = $ENV{HOME};
+
+ -d "$home/.dpg" or mkdir("$home/.dpg") or die $!;
+ -d "$home/.dpg/versioncheck" or mkdir("$home/.dpg/versioncheck") or die $!;
+
+ Storable::store($ref, "$home/.dpg/versioncheck/$name");
+}
+
+sub scan_packages($$)
+{
+ my( $suite, $hash ) = @_;
+ foreach my $section ( qw( main contrib non-free ) )
+ {
+ # TODO This is somewhat brute-force, reading the whole sources into
+ # memory, then de-compressing them also in memory.
+ # Should be made incremental using reasonable-sized buffer
+ my $url = "$MIRROR/debian/dists/$suite/$section/source/Sources.gz";
+ my $sources_gz = LWP::Simple::get($url);
+ $sources_gz or die "Can't download $url";
+ my $sources = Compress::Zlib::memGunzip(\$sources_gz);
+ my $src_io = IO::Scalar->new(\$sources);
+
+ my $pkg;
+ while( <$src_io> )
+ {
+ chomp;
+ if( s/^Package: // )
+ {
+ $pkg = $_;
+ next;
+ }
+
+ if( s/^Version: // )
+ {
+ $hash->{$pkg} = $_;
+ }
+ }
+ }
+
+ debugmsg(
+ sprintf(
+ "Information about %d %s packages loaded\n",
+ scalar(keys(%$hash)),
+ $suite,
+ ),
+ );
+ to_cache($hash, $suite);
+}
+
+my %packages; # contains {package => version} pairs
+scan_packages(
+ 'unstable', \%packages,
+) unless from_cache(\%packages, 'unstable', 6);
+
+my %experimental; # contains {package => version} pairs
+scan_packages(
+ 'experimental', \%experimental,
+) unless from_cache(\%experimental, 'experimental', 6);
+
+my %stable; # contains {package => version} pairs
+scan_packages(
+ 'stable', \%stable,
+) unless from_cache(\%stable, 'stable', 168); # 1 week
+
+my %oldstable; # contains {package => version} pairs
+scan_packages(
+ 'oldstable', \%oldstable,
+) unless from_cache(\%oldstable, 'oldstable', 168); # 1 week
+
+
+my %incoming; # contains {package => version} pairs
+do {
+ my $incoming = LWP::Simple::get('http://incoming.debian.org')
+ or die "Unable to retreive http://incoming.debian.org";
+ my $inc_io = IO::Scalar->new(\$incoming);
+ while( <$inc_io> )
+ {
+ chomp;
+ next unless /a href="([^_]+)_(.+)\.dsc"/;
+
+ $incoming{$1} = $2;
+ }
+};
+debugmsg( sprintf("Information about %d incoming packages loaded\n", scalar(keys(%incoming))) );
+
+my %new; # contains {package => version} pairs
+do {
+ my $new = LWP::Simple::get('http://ftp-master.debian.org/new.html');
+ my $te = HTML::TableExtract->new(
+ headers=> [
+ qw(Package Version Arch Distribution Age Maintainer Closes)
+ ],
+ );
+ $te->parse($new);
+ foreach my $table( $te->tables )
+ {
+ foreach my $row( $table->rows )
+ {
+ next unless $row->[2] =~ /source/;
+
+ my @versions = split(/\n/, $row->[1]);
+ s/<br>// foreach @versions;
+
+ $new{$row->[0]} = $versions[-1];# use the last uploaded version
+ }
+ }
+};
+debugmsg( sprintf("Information about %d NEW packages loaded\n", scalar(keys(%new))) );
+
+my %cpan_authors;
+my %cpan_modules;
+do {
+ open(TMP, '+>', undef) or die "Unable to open anonymous temporary file";
+ my $old = select(TMP);
+ my $lslr = LWP::Simple::getprint("$CPAN_MIRROR/ls-lR.gz");
+ select($old);
+ seek(TMP, 0, 0);
+ my $gz = Compress::Zlib::gzopen(\*TMP, 'rb') or die $Compress::Zlib::gzerrno;
+
+ my $storage;
+ my ($section, $path);
+ while( $gz->gzreadline($_) )
+ {
+ chomp;
+ next unless $_;
+
+ if( m{^\./authors/id/(.+):} )
+ {
+ $storage = $cpan_authors{$1} ||= [];
+ }
+ elsif( m{^\./modules/by-module/(.+):} )
+ {
+ $storage = $cpan_modules{$1} ||= [];
+ }
+ elsif( m{\..*:} )
+ {
+ undef($storage);
+ }
+ else
+ {
+ next unless $storage;
+
+ my(
+ $perm, $ln, $o, $g, $size, $month, $day, $time, $what, $where,
+ ) = split(/\s+/);
+
+ next unless $what and $what =~ /(?:tar\.gz|\.tgz|\.zip|\.tar\.bz2|\.tbz)$/;
+
+ push @$storage, $what;
+ }
+ }
+ close(TMP);
+
+ to_cache(\%cpan_modules, 'cpan_modules');
+ to_cache(\%cpan_authors, 'cpan_authors');
+} unless from_cache(\%cpan_authors, 'cpan_authors', 12)
+ and from_cache(\%cpan_modules, 'cpan_modules', 12);
+
+
+# RETURNS
+# 1 if first version is bigger
+# 0 if both versions are equal
+# -1 if second version is bigger
+sub cmp_ver($$)
+{
+ my($a,$b) = @_;
+
+ while( $a and $b )
+ {
+ $a =~ s/^(\w*)//; my $a_w = $1||'';
+ $b =~ s/^(\w*)//; my $b_w = $1||'';
+
+ my $r = $a_w cmp $b_w;
+
+ return $r if $r;
+
+ $a =~ s/^(\d*)//; my $a_d = (defined($1) and $1 ne '') ? $1 : -1;
+ $b =~ s/^(\d*)//; my $b_d = (defined($1) and $1 ne '') ? $1 : -1;
+
+ $r = $a_d <=> $b_d;
+
+ return $r if $r;
+
+ $a =~ s/^(\D*)//; my $a_nd = $1||'';
+ $b =~ s/^(\D*)//; my $b_nd = $1||'';
+
+ $r = $a_nd cmp $b_nd;
+
+ return $r if $r;
+ }
+ return 1 if $a;
+ return -1 if $b;
+ return 0;
+}
+
+sub unmangle( $ $ )
+{
+ my( $ver, $mangles ) = @_;
+
+ return $ver unless $mangles;
+
+ my @vms = map( split(/;/, $_), @$mangles );
+
+ foreach my $vm( @vms )
+ {
+ eval "\$ver =~ $vm";
+ die "<<\$_ =~ $vm>> $@" if $@;
+ debugmsg(" mangled: $ver\n");
+ }
+
+ return $ver;
+}
+
+# RETURNS undef if all watch files point to CPAN
+sub latest_upstream_from_watch($)
+{
+ my ($watch) = @_;
+
+ my @vers;
+
+ foreach(@$watch)
+ {
+ my( $wline, $opts ) = @$_;
+
+ $wline =~ m{^(http://\S+)/};
+ my $url = $1;
+ $url =~ s{^http://sf.net/}{http://sf.net.projects/};
+
+ $wline =~ s{^http://sf\.net/(\S+)}{http://qa.debian.org/watch/sf.php/$1};
+ if( $wline =~ m{
+ ^((?:http|ftp)://\S*?) # http://server/some/path - captured
+ # non-greedy to not eat up the pattern
+ (?:/\s*|\s+) # delimiter - '/' for ver3 or space for ver2
+ ([^\s/]+) # the search pattern - no spaces, no slashes - captured
+ (?:
+ (?!.*\() # followed by non-(search pattern)
+ |
+ \s*$ # or EOL
+ )
+ }ix )
+ {
+ my( $dir, $filter ) = ($1, $2);
+ debugmsg( " uscan $dir $filter\n" );
+ $url ||= $dir;
+ my $page = LWP::Simple::get($dir) or return "Unable to get $dir (".__LINE__.")";
+ my $page_io = IO::Scalar->new(\$page);
+ while( <$page_io> )
+ {
+ warn $_ if 1;
+
+ if( $dir =~ /^http/ )
+ {
+ while( s/<a [^>]*href="([^"]+)"[^>]*>//i )
+ {
+ my $href = $1;
+ push @vers, [
+ unmangle( $1, $opts->{uversionmangle} ),
+ $url,
+ ] if $href =~ $filter;
+ }
+ }
+ else
+ {
+ while( s/(?:^|\s+)$filter(?:\s+|$)// )
+ {
+ push @vers, [
+ unmangle( $1, $opts->{uversionmangle} ),
+ $url,
+ ];
+ }
+ }
+ }
+ }
+ else
+ {
+ return "bad watch URL $wline";
+ }
+ }
+
+ @vers = sort { cmp_ver($a->[0],$b->[0]) } @vers;
+
+ my $ver = $vers[-1] || '';
+ my $url;
+
+ ($ver, $url) = $ver ? @$ver : (undef, undef);
+
+ return wantarray ? ($ver, $url) : $ver;
+}
+
+# returns array of [ver, path]
+sub cpan_versions($$$)
+{
+ my($where, $wline, $opts) = @_;
+
+ $wline =~ m{
+ ^(\S*?) # some/path - captured
+ # non-greedy to not eat up the pattern
+ (?:/\s*|\s+) # delimiter - '/' for ver3 or space for ver2
+ ([^\s/]+) # the search pattern - no spaces, no slashes - captured
+ (?!.*\() # not followed by search pattern
+ }ix;
+ my( $key, $filter) = ($1, $2);
+ debugmsg( sprintf( " module search %s %s\n", $key, $filter ) );
+
+ my $list = $where->{$key};
+ unless($list)
+ {
+ debugmsg("directory $key not found (from $wline) [".__LINE__."]\n");
+ return();
+ }
+
+ my @vers;
+ foreach(@$list)
+ {
+ if( $_ =~ $filter )
+ {
+ debugmsg(" looking at $_\n") if 1;
+ my $ver = unmangle( $1, $opts->{uversionmangle} );
+ push @vers, [$ver, $key];
+ }
+ }
+
+ return @vers;
+}
+
+# returns (version, URL)
+sub latest_upstream_from_cpan($)
+{
+ my ($watch) = @_;
+
+ my @cpan = grep( $_->[0] =~ m{(?:^|\s)(?:http|ftp)://\S*cpan}i, @$watch );
+
+ return undef unless @cpan;
+
+ my @vers;
+
+ foreach(@cpan)
+ {
+ my( $wline, $opts ) = @$_;
+ if( $wline =~ s{^(?:http|ftp)://\S*cpan\S*/modules/by-module/}{}i )
+ {
+ # lookup by module
+ push @vers, map(
+ [ $_->[0], "http://www.cpan.org/modules/by-module/".$_->[1] ],
+ cpan_versions(\%cpan_modules, $wline, $opts),
+ );
+ }
+ elsif( $wline =~ s{^(?:http|ftp)://\S*cpan\S*/authors/(?:by-)?id/}{}i
+ or
+ $wline =~ s{^(?:http|ftp)://\S*cpan\S*/(?:by-)?authors/id/}{}i
+ )
+ {
+ # lookup by author
+ push @vers, map(
+ [ $_->[0], "http://www.cpan.org/authors/id/".$_->[1] ],
+ cpan_versions(\%cpan_authors, $wline, $opts),
+ );
+ }
+ else
+ {
+ debugmsg( sprintf( " can't determine type of search for %s\n", $wline ) );
+ return undef;
+ }
+ }
+
+ @vers = sort { cmp_ver($a->[0],$b->[0]) } @vers;
+
+ my $ver = $vers[-1];
+ my $url;
+ if( $ver )
+ {
+ ($ver, $url) = @$ver;
+ }
+ else
+ {
+ undef($ver); undef($url);
+ }
+
+ return wantarray ? ($ver, $url) : $ver;
+}
+
+sub unmangle_debian_version($$)
+{
+ my($ver, $watch) = @_;
+
+ foreach( @$watch )
+ {
+ my $dvm = $_->[1]->{dversionmangle} if $_->[1];
+ $dvm ||= [];
+
+ do {
+ eval "\$ver =~ $_";
+ die "\$ver =~ $dvm -> $@" if $@;
+ } foreach @$dvm;
+ }
+
+ return $ver;
+}
+
+
+print <<_EOF;
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+ "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+ <title>pkg-perl package versions</title>
+ <style type="text/css">
+ body {
+ background: white;
+ color: black;
+ }
+ table {
+ border: 1px solid black;
+ border-collapse: collapse;
+ empty-cells: show;
+ }
+ td, th {
+ border: 1px solid black;
+ }
+ .upload {
+ background: lightsalmon;
+ }
+ .upgrade {
+ background: lightblue;
+ }
+ </style>
+</head>
+<body>
+<table>
+<tr><th>Legend</th></tr>
+<tr><td class="upload">Needs uploading</td></tr>
+<tr><td class="upgrade">Needs upgrade from upstream</td></tr>
+</table>
+
+<br>
+
+<table>
+<tr>
+ <th>Package</th>
+ <th>Repository</th>
+ <th>Archive</th>
+ <th>upstream</th>
+</tr>
+_EOF
+
+my $total = 0;
+my $total_shown = 0;
+my $svn = SVN::Client->new();
+
+sub check_package($)
+{
+ my( $dir ) = @_;
+
+ debugmsg( "Examining $dir\n" );
+
+ my $pkg = "";
+ my $changelog = "";
+
+ my $in_svn = 'Unknown SVN version';
+ my $svn_changer = "";
+ my $svn_date = "";
+ my $svn_error;
+ my $svn = SVN::Client->new();
+ {
+ my $changelog_fh = IO::Scalar->new( \$changelog );
+ local $SVN::Error::handler = undef;
+ ($svn_error) = $svn->cat(
+ $changelog_fh,
+ "$SVN_REPO/trunk/$dir/debian/changelog",
+ 'HEAD',
+ );
+ }
+ if(SVN::Error::is_error($svn_error))
+ {
+ if($svn_error->apr_err() == $SVN::Error::FS_NOT_FOUND)
+ {
+ $in_svn = 'Missing debian/changelog';
+ $svn_error->clear();
+ }
+ else
+ {
+ SVN::Error::croak_on_error($svn_error);
+ }
+ }
+ my @cl;
+ if($changelog) {
+ @cl = Parse::DebianChangelog->init({instring=>$changelog})->data;
+ }
+ foreach( @cl )
+ {
+ next unless $_->Distribution eq 'unstable';
+ next if $_->Changes =~ /NOT RELEASED/;
+
+ $in_svn = $_->Version;
+ $svn_changer = $_->Maintainer;
+ $svn_date = $_->Date;
+ $pkg = $_->Source;
+ last;
+ }
+
+ my $in_archive = $packages{$pkg} || '';
+ debugmsg( sprintf(" - Archive has %s\n", $in_archive||'none') );
+
+ my $in_experimental = $experimental{$pkg};
+ debugmsg( sprintf( " - experimental has %s\n", $in_experimental||'none' ) );
+
+ my $in_stable = $stable{$pkg};
+ debugmsg( sprintf( " - stable has %s\n", $in_stable||'none' ) );
+
+ my $in_oldstable = $oldstable{$pkg};
+ debugmsg( sprintf( " - oldstable has %s\n", $in_oldstable||'none' ) );
+
+
+ my $upstream = '';
+ my $upstream_is_cpan;
+ my $in_cpan = '';
+ my $upstream_url;
+ my @watch;
+ my $watch;
+ {
+ my $watch_io = IO::Scalar->new(\$watch);
+ local $SVN::Error::handler = undef;
+ ($svn_error) = $svn->cat(
+ $watch_io,
+ "$SVN_REPO/trunk/$dir/debian/watch",
+ 'HEAD',
+ );
+ $watch_io->close();
+ }
+ if(SVN::Error::is_error($svn_error))
+ {
+ if($svn_error->apr_err() == $SVN::Error::FS_NOT_FOUND)
+ {
+ $upstream = (
+ ( $in_svn =~ /-.+$/ )
+ ? 'Missing debian/watch'
+ : $in_svn # native package
+ );
+ $svn_error->clear();
+ $watch = "";
+ }
+ else
+ {
+ SVN::Error::croak_on_error($svn_error);
+ }
+ }
+
+ $watch =~ s/\\\n//gs;
+ my @watch_lines = split(/\n/, $watch) if $watch;
+
+ @watch_lines = grep( (!/^#/ and !/^version=/ and !/^\s*$/), @watch_lines );
+
+ foreach(@watch_lines)
+ {
+ debugmsg( " watch line $_\n" ) if 0;
+ # the opts value either contains no spaces or is enclosed in double quotes
+ my $opts = $1 if s!^\s*opts="([^"]*)"\s+!! or s!^\s*opts=(\S*)\s+!!;
+ debugmsg( " watch options = $opts\n" ) if $opts;
+ # multiple options are separated by commas; commas are not allowed within an option value
+ my @opts = split(/\s*,\s*/, $opts) if $opts;
+ my %opts;
+ foreach(@opts)
+ {
+ next if /^(?:active|passive|pasv)$/;
+
+ /([^=]+)=(.*)/;
+ debugmsg( " watch option $1 = $2\n" );
+ if( $1 eq 'versionmangle' )
+ {
+ push @{ $opts{uversionmangle} }, $2;
+ push @{ $opts{dversionmangle} }, $2;
+ }
+ else
+ {
+ push @{ $opts{$1} }, $2;
+ }
+ }
+ s!^http://www.cpan.org/!$CPAN_MIRROR/!;
+ s!^ftp://www.cpan.org/!$CPAN_MIRROR/!;
+ s!^http://backpan.perl.org/authors/!$CPAN_MIRROR/authors/!;
+ s!^http://mirrors.kernel.org/cpan/!$CPAN_MIRROR/!;
+ s!^ftp://mirrors.kernel.org/cpan/!$CPAN_MIRROR/!;
+
+ push @watch, [ $_, \%opts ];
+ }
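+
+ # Illustrative example (never executed; the URL and pattern are made up):
+ # for a watch line such as
+ #   opts="versionmangle=s/_/./" http://www.example.org/releases/ foo-bar-(\d[\d.]+)\.tar\.gz
+ # the loop above strips the opts part, applies 'versionmangle' to both the
+ # upstream and the Debian side, and pushes this entry onto @watch:
+ if (0)
+ {
+ push @watch, [
+ 'http://www.example.org/releases/ foo-bar-(\d[\d.]+)\.tar\.gz',
+ { uversionmangle => [ 's/_/./' ], dversionmangle => [ 's/_/./' ] },
+ ];
+ }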
+
+ my $up_svn = $in_svn;
+ $up_svn =~ s/^(?:\d+:)?(.+?)(?:-[^-]+)?$/$1/ if $up_svn;
+ $up_svn = unmangle_debian_version($up_svn, \@watch) if @watch;
+ debugmsg(
+ sprintf(
+ " - SVN has %s (upstream version=%s)\n",
+ $in_svn||'none',
+ $up_svn||'none',
+ )
+ );
+
+ if( @watch )
+ {
+ ($in_cpan, $upstream_url) = latest_upstream_from_cpan(\@watch);
+ debugmsg( sprintf( " - CPAN has %s (%s)\n", $in_cpan||'none', $upstream_url||'no url' ) );
+ if( $in_cpan )
+ {
+ $upstream_is_cpan = 1;
+ $upstream = $in_cpan;
+ }
+ else
+ {
+ ($upstream, $upstream_url) = latest_upstream_from_watch(\@watch);
+ }
+ debugmsg( sprintf( " - upstream has %s (%s)\n", $upstream||'none', $upstream_url||'no url' ) );
+ }
+ else
+ {
+ $upstream ||= (
+ ( $in_svn =~ /-.+$/ )
+ ? qq(Invalid <a href="http://svn.debian.org/wsvn/pkg-perl/trunk/$dir/debian/watch?op=file&rev=0&sc=0">debian/watch</a>)
+ : $in_svn # native package
+ );
+ }
+
+
+ my $in_incoming = $incoming{$pkg}||'';
+ debugmsg( sprintf( " - incoming has %s\n", $in_incoming||'none' ) );
+ my $in_new = $new{$pkg}||'';
+ debugmsg( sprintf( " - NEW has %s\n", $in_new||'none' ) );
+
+
+
+ if( $up_svn ne $upstream
+ or
+ $in_svn ne $in_archive
+ and
+ $in_svn ne $in_incoming
+ and
+ $in_svn ne $in_new
+ )
+ {
+ print "<tr>\n";
+ print "<td>".(
+ ($in_archive)
+ ? qq(<a href="http://packages.qa.debian.org/$pkg">$pkg</a>)
+ : qq($pkg)
+ )."</td>\n";
+
+ my $in_svn_text = qq(<a href="http://svn.debian.org/wsvn/pkg-perl/trunk/$dir/debian/changelog?op=file&rev=0&sc=0" title="$svn_changer\n$svn_date">$in_svn</a>);
+ print "<td".(
+ ($in_svn ne $in_archive)
+ ? ' class="upload"'
+ : ''
+ ).">$in_svn_text</td>\n";
+
+ my $archive_text = join(
+ "\n",
+ $in_archive||(),
+ (
+ ($in_incoming)
+ ? "Incoming: $in_incoming"
+ : ()
+ ),
+ (
+ ($in_new)
+ ? "NEW: $in_new"
+ : ()
+ ),
+ (
+ ($in_experimental)
+ ? "experimental: $in_experimental"
+ : ()
+ ),
+ (
+ ($in_stable and not $in_archive and not $in_experimental)
+ ? "stable: $in_stable"
+ : ()
+ ),
+ (
+ ($in_oldstable and not $in_stable and not $in_archive and not $in_experimental)
+ ? "oldstable: $in_oldstable"
+ : ()
+ ),
+ );
+
+ $archive_text = qq(<a href="http://packages.qa.debian.org/$pkg">$archive_text</a> [<a style="font-size:smaller" href="http://bugs.debian.org/src:$pkg">BTS</a>]) if $in_archive or $in_experimental or $in_stable or $in_oldstable;
+
+ print "<td>$archive_text</td>\n";
+
+ my $upstream_text = (
+ $upstream_is_cpan
+ ? "CPAN: $in_cpan"
+ : $upstream
+ );
+ $upstream_text = qq(<a href="$upstream_url">$upstream_text</a>) if $upstream_url;
+
+ print(
+ ($up_svn ne $upstream)
+ ? qq(<td class="upgrade">$upstream_text</td>\n)
+ : "<td></td>\n"
+ );
+ print "</tr>\n";
+
+ return 1;
+ }
+
+ return 0;
+}
+
+my @pkgs_to_check;
+if( @ARGV )
+{
+ @pkgs_to_check = @ARGV;
+}
+else
+{
+# loop over packages
+ my $svn_packages = $svn->ls("$SVN_REPO/trunk", 'HEAD', 0);
+
+ debugmsg(
+ sprintf(
+ "%d entries in trunk\n",
+ scalar(keys(%$svn_packages)),
+ ),
+ );
+ @pkgs_to_check = sort(keys %$svn_packages);
+}
+foreach my $pkg( @pkgs_to_check )
+{
+ $total++;
+
+ $total_shown++ if check_package($pkg);
+}
+
+my $date = gmtime;
+print <<_EOF;
+<tr><td colspan=\"4\"><b>TOTAL: $total_shown/$total</b></td></tr>
+</table>
+<hr>
+$date UTC<br>
+<i>$THIS_REVISION</i>
+</body>
+_EOF
+
+
+exit 0
+
+# vim: et:sts=4:ai:sw=4
Property changes on: trunk/community/qa/oldscripts/versioncheck.pl
___________________________________________________________________
Name: svn:executable
+ *
Added: trunk/community/qa/oldscripts/versioncheck2.pl
===================================================================
--- trunk/community/qa/oldscripts/versioncheck2.pl (rev 0)
+++ trunk/community/qa/oldscripts/versioncheck2.pl 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,1054 @@
+#!/usr/bin/perl -w
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+
+### TODO ###
+#
+# Try harder to use 02packages.details.gz for authoritative CPAN
+# version source, regardless of whether debian/watch uses by-module URL
+# or by-author one
+#
+# Use AptPkg::Version for
+# - version comparison
+# - stripping the Debian revision from a version
+
+our $THIS_REVISION = '$Id: versioncheck2.pl 8974 2007-11-07 15:28:29Z gregoa-guest $';
+
+BEGIN {
+ my $self_dir = $0;
+ $self_dir =~ s{/[^/]+$}{};
+ unshift @INC, $self_dir;
+};
+
+use strict;
+use Carp qw(confess);
+use Common;
+use LWP::Simple ();
+use Compress::Zlib ();
+use HTML::TableExtract;
+use SVN::Client;
+use SVN::Core;
+use IO::Scalar;
+use Parse::DebianChangelog;
+use Getopt::Long;
+use File::Path;
+
+our $opt_debug = 0;
+my $force_cpan = 0;
+my $force_rescan = 0;
+my @pkg_rescan = ();
+our $CACHEDIR = "$ENV{HOME}/.dpg/versioncheck";
+our $svn = SVN::Client->new();
+
+GetOptions(
+ 'debug!' => \$opt_debug,
+ 'force-cpan!' => \$force_cpan,
+ 'force-rescan!' => \$force_rescan,
+ 'rescan=s' => \@pkg_rescan,
+ 'cache-dir=s' => \$CACHEDIR
+);
+
+sub debugmsg(@)
+{
+ warn @_ if $opt_debug;
+};
+
+mkpath $CACHEDIR;
+my $lockfile = "$CACHEDIR/.lock";
+if(-e $lockfile) {
+ if(-M $lockfile > 1/24) { # 1 hour
+ debugmsg("Stale lock file -- deleting\n");
+ unlink $lockfile or die $!;
+ } else {
+ die("Other instance of $0 is running!\n");
+ }
+}
+$SIG{HUP} = $SIG{INT} = $SIG{QUIT} = \&sighandler;
+$SIG{SEGV} = $SIG{PIPE} = $SIG{TERM} = \&sighandler;
+$SIG{__DIE__} = \&diehandler;
+open(LOCK, ">", $lockfile) or die $!;
+close(LOCK) or die $!;
+
+# Get some information globally
+
+use Storable();
+use LWP::UserAgent;
+
+debugmsg( "CPAN mirror is $CPAN_MIRROR\n" );
+debugmsg( "The cache is in $CACHEDIR\n" );
+
+sub diehandler
+{
+ die @_ if($^S); # eval
+ debugmsg("Removing lockfile...\n");
+ unlink $lockfile;
+ die @_;
+}
+sub sighandler
+{
+ my $sig = shift;
+ warn "Caught $sig signal...\n";
+ debugmsg("Removing lockfile...\n");
+ unlink $lockfile;
+ # signal myself again
+ $SIG{$sig} = "DEFAULT";
+ kill $sig, $$;
+}
+sub from_cache($$$)
+{
+ my( $ref, $name, $max_age) = @_;
+
+ my $dir = $CACHEDIR;
+
+ return undef unless -f "$dir/$name" and -M(_) <= $max_age/24;
+
+ my $data = Storable::retrieve("$dir/$name");
+ return undef unless $data;
+
+ debugmsg("$name loaded from cache (".scalar(keys(%$data)).")\n");
+
+ %$ref = %$data;
+ return 1;
+}
+
+sub to_cache($$)
+{
+ my( $ref, $name) = @_;
+
+ Storable::store($ref, "$CACHEDIR/$name");
+}
+
+sub scan_packages($$)
+{
+ my( $suite, $hash ) = @_;
+ foreach my $section ( qw( main contrib non-free ) )
+ {
+ # TODO This is somewhat brute-force: it reads the whole Sources.gz into
+ # memory and then decompresses it in memory as well. It should be made
+ # incremental, using a reasonably-sized buffer (see the sketch after this sub).
+ my $url = "$MIRROR/debian/dists/$suite/$section/source/Sources.gz";
+ my $sources_gz = LWP::Simple::get($url);
+ unless($sources_gz) {
+ warn "Can't download $url";
+ return 0;
+ }
+ my $sources = Compress::Zlib::memGunzip(\$sources_gz);
+ my $src_io = IO::Scalar->new(\$sources);
+
+ my $pkg;
+ while( <$src_io> )
+ {
+ chomp;
+ if( s/^Package: // )
+ {
+ $pkg = $_;
+ next;
+ }
+
+ if( s/^Version: // )
+ {
+ $hash->{$pkg} = $_;
+ }
+ }
+ }
+
+ debugmsg(
+ sprintf(
+ "Information about %d %s packages loaded\n",
+ scalar(keys(%$hash)),
+ $suite,
+ ),
+ );
+ to_cache($hash, $suite);
+ 1;
+}
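+
+# Illustrative sketch for the TODO in scan_packages() above (never executed;
+# suite and section are fixed to "unstable"/"main" just for the example):
+# save the compressed index to disk and read it line by line with
+# gzopen()/gzreadline(), the same way scan_cpan() below reads ls-lR.gz,
+# instead of holding both the compressed and the decompressed data in memory.
+if (0)
+{
+ my $url = "$MIRROR/debian/dists/unstable/main/source/Sources.gz";
+ my $tmp = "$CACHEDIR/Sources.gz";
+ LWP::Simple::getstore($url, $tmp);
+ my $gz = Compress::Zlib::gzopen($tmp, 'rb') or die $Compress::Zlib::gzerrno;
+ my $line;
+ while( $gz->gzreadline($line) )
+ {
+ # parse "Package: " and "Version: " lines here, as scan_packages() does
+ }
+ $gz->gzclose();
+ unlink $tmp;
+}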
+
+my %packages; # contains {package => version} pairs
+unless(from_cache(\%packages, 'unstable', 6)) {
+ scan_packages('unstable', \%packages)
+ or from_cache(\%packages, 'unstable', 999) or die;
+}
+
+my %experimental; # contains {package => version} pairs
+unless(from_cache(\%experimental, 'experimental', 6)) {
+ scan_packages('experimental', \%experimental)
+ or from_cache(\%experimental, 'experimental', 999) or die;
+}
+
+my %stable; # contains {package => version} pairs
+unless(from_cache(\%stable, 'stable', 168)) {
+ scan_packages('stable', \%stable)
+ or from_cache(\%stable, 'stable', 999) or die;
+}
+
+my %oldstable; # contains {package => version} pairs
+unless(from_cache(\%oldstable, 'oldstable', 168)) {
+ scan_packages('oldstable', \%oldstable)
+ or from_cache(\%oldstable, 'oldstable', 999) or die;
+}
+
+my %incoming; # contains {package => version} pairs
+unless(from_cache(\%incoming, 'incoming', 1)) {
+ scan_incoming(\%incoming)
+ or from_cache(\%incoming, 'incoming', 999) or die;
+}
+
+my %new; # contains {package => version} pairs
+unless(from_cache(\%new, 'new', 1)) {
+ scan_new(\%new)
+ or from_cache(\%new, 'new', 999) or die;
+}
+
+my( %cpan_authors, %cpan_modules, %cpan_dists, $cpan_updated );
+unless(not $force_cpan
+ and from_cache(\%cpan_authors, 'cpan_authors', 12)
+ and from_cache(\%cpan_modules, 'cpan_modules', 12)
+ and from_cache(\%cpan_dists, 'cpan_dists', 12))
+{
+ if(scan_cpan(\%cpan_authors, \%cpan_modules, \%cpan_dists)) {
+ $cpan_updated = 1;
+ } else {
+ from_cache(\%cpan_authors, 'cpan_authors', 999) or die;
+ from_cache(\%cpan_modules, 'cpan_modules', 999) or die;
+ from_cache(\%cpan_dists, 'cpan_dists', 999) or die;
+ }
+}
+
+sub scan_incoming {
+ my $inchash = shift;
+ my $incoming = LWP::Simple::get('http://incoming.debian.org')
+ or die "Unable to retreive http://incoming.debian.org";
+ my $inc_io = IO::Scalar->new(\$incoming);
+ while( <$inc_io> )
+ {
+ chomp;
+ next unless /a href="([^_]+)_(.+)\.dsc"/;
+
+ $inchash->{$1} = $2;
+ }
+ to_cache($inchash, "incoming");
+ debugmsg( sprintf("Information about %d incoming packages loaded\n",
+ scalar(keys(%$inchash))) );
+};
+
+sub scan_new {
+ my $newhash = shift;
+ my $new = LWP::Simple::get('http://ftp-master.debian.org/new.html');
+ my $te = HTML::TableExtract->new(
+ headers=> [
+ qw(Package Version Arch Distribution Age Maintainer Closes)
+ ],
+ );
+ $te->parse($new);
+ foreach my $table( $te->tables )
+ {
+ foreach my $row( $table->rows )
+ {
+ next unless $row->[2] =~ /source/;
+
+ my @versions = split(/\n/, $row->[1]);
+ s/<br>// foreach @versions;
+
+ $newhash->{$row->[0]} = $versions[-1];# use the last uploaded version
+ }
+ }
+ to_cache($newhash, "new");
+ debugmsg( sprintf("Information about %d NEW packages loaded\n",
+ scalar(keys(%$newhash))) );
+}
+
+sub scan_cpan {
+ my( $cpauth, $cpmod, $cpdist ) = @_;
+ open(TMP, '+>', undef) or die "Unable to open anonymous temporary file";
+ my $old = select(TMP);
+ my $lslr = LWP::Simple::getprint("$CPAN_MIRROR/ls-lR.gz");
+ unless(-s TMP) {
+ close TMP;
+ return 0;
+ }
+ select($old);
+ seek(TMP, 0, 0);
+ my $gz = Compress::Zlib::gzopen(\*TMP, 'rb') or die $Compress::Zlib::gzerrno;
+
+ my $storage;
+ my ($section, $path);
+ while( $gz->gzreadline($_) )
+ {
+ chomp;
+ next unless $_;
+
+ # catch dist
+ if( m{
+ \s # blank
+ ( # $1 will capture the whole file name
+ (\S+?) # dist name - in $2
+ - # separator - dash
+ v? # optional 'v' before the version
+ (?: # version
+ \d # starts with a digit
+ [\d._]+ # followed by digits, periods and underscores
+ )
+ (?: # file extension
+ \.tar # .tar
+ (?:\.gz)? # most probably followed by .gz
+ | \.zip # yeah, that ugly OS is not wiped yet
+ )
+ )$}x # and this finishes the line
+ )
+ {
+ $cpdist->{$2} ||= [];
+ push @{ $cpdist->{$2} }, $1;
+ }
+
+ if( m{^\./authors/id/(.+):} )
+ {
+ $storage = $cpauth->{$1} ||= [];
+ }
+ elsif( m{^\./modules/by-module/(.+):} )
+ {
+ $storage = $cpmod->{$1} ||= [];
+ }
+ elsif( m{\..*:} )
+ {
+ undef($storage);
+ }
+ else
+ {
+ next unless $storage;
+
+ my(
+ $perm, $ln, $o, $g, $size, $month, $day, $time, $what, $where,
+ ) = split(/\s+/);
+
+ next unless $what and $what =~ /(?:tar\.gz|\.tgz|\.zip|\.tar\.bz2|\.tbz)$/;
+
+ push @$storage, $what;
+ }
+ }
+ close(TMP);
+ to_cache($cpauth, 'cpan_authors');
+ to_cache($cpmod, 'cpan_modules');
+ to_cache($cpdist, 'cpan_dists' );
+ 1;
+}
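+
+# For reference (the names are made-up examples): after scan_cpan() the three
+# hashes look roughly like
+#   %cpan_authors: 'F/FO/FOOBAR' => [ 'Foo-Bar-0.01.tar.gz', ... ]
+#   %cpan_modules: 'Foo'         => [ 'Foo-Bar-0.01.tar.gz', ... ]
+#   %cpan_dists:   'Foo-Bar'     => [ 'Foo-Bar-0.01.tar.gz', ... ]
+# i.e. an author directory, module directory or dist name mapped to the
+# tarball file names found for it in ls-lR.gz.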
+
+# RETURNS
+# 1 if first version is bigger
+# 0 if both versions are equal
+# -1 if second version is bigger
+sub cmp_ver($$)
+{
+ my($a,$b) = @_;
+
+ while( $a and $b )
+ {
+ $a =~ s/^(\w*)//; my $a_w = $1||'';
+ $b =~ s/^(\w*)//; my $b_w = $1||'';
+
+ my $r = $a_w cmp $b_w;
+
+ return $r if $r;
+
+ $a =~ s/^(\d*)//; my $a_d = (defined($1) and $1 ne '') ? $1 : -1;
+ $b =~ s/^(\d*)//; my $b_d = (defined($1) and $1 ne '') ? $1 : -1;
+
+ $r = $a_d <=> $b_d;
+
+ return $r if $r;
+
+ $a =~ s/^(\D*)//; my $a_nd = $1||'';
+ $b =~ s/^(\D*)//; my $b_nd = $1||'';
+
+ $r = $a_nd cmp $b_nd;
+
+ return $r if $r;
+ }
+ return 1 if $a;
+ return -1 if $b;
+ return 0;
+}
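+
+# Quick illustration of the contract above (never executed; the version
+# strings are arbitrary examples). Note that runs of digits are first
+# compared as strings by the \w* step, so purely numeric multi-digit
+# components may compare unexpectedly; that is one reason the TODO at the
+# top of this script suggests switching to AptPkg::Version.
+if (0)
+{
+ cmp_ver('1.2', '1.1') == 1 or warn "unexpected";
+ cmp_ver('1.1', '1.1') == 0 or warn "unexpected";
+ cmp_ver('1.1', '1.1.1') == -1 or warn "unexpected";
+}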
+
+sub unmangle( $ $ )
+{
+ my( $ver, $mangles ) = @_;
+
+ return $ver unless $mangles;
+
+ my @vms = map( split(/;/, $_), @$mangles );
+
+ foreach my $vm( @vms )
+ {
+ eval "\$ver =~ $vm";
+ die "<<\$_ =~ $vm>> $@" if $@;
+ debugmsg(" mangled: $ver\n");
+ }
+
+ return $ver;
+}
+
+# RETURNS undef if all watch files point to CPAN
+sub latest_upstream_from_watch($)
+{
+ my ($watch) = @_;
+
+ my @vers;
+
+ foreach(@$watch)
+ {
+ my( $wline, $opts ) = @$_;
+
+ $wline =~ m{^((?:http|ftp)://\S+)/};
+ my $url = $1 or confess "Invalid watch line given? '$wline'";
+ $url =~ s{^http://sf.net/}{http://sf.net/projects/};
+
+ $wline =~ s{^http://sf\.net/(\S+)}{http://qa.debian.org/watch/sf.php/$1};
+
+ my @items = split(/\s+/, $wline);
+
+ my( $dir, $filter );
+
+ # Either we have single URL/pattern
+ # or URL/pattern + extra
+ if( $items[0] =~ /\(/ )
+ {
+ # Since '+' is greedy, the second capture has no slashes
+ ($dir, $filter) = $items[0] =~ m{^(.+)/(.+)$};
+ }
+ # or, we have a homepage plus pattern
+ # (plus optional other non-interesting stuff)
+ elsif( @items >= 2 and $items[1] =~ /\(/ )
+ {
+ ($dir, $filter) = @items[0,1];
+ }
+
+ if( $dir and $filter )
+ {
+ debugmsg( " uscan $dir $filter\n" );
+ $url ||= $dir;
+ my $page = LWP::Simple::get($dir) or return "Unable to get $dir (".__LINE__.")";
+ my $page_io = IO::Scalar->new(\$page);
+ while( <$page_io> )
+ {
+ warn $_ if 0;
+
+ if( $dir =~ /^http/ )
+ {
+ while( s/<a [^>]*href="([^"]+)"[^>]*>//i )
+ {
+ my $href = $1;
+ push @vers, [
+ unmangle( $1, $opts->{uversionmangle} ),
+ $url,
+ ] if $href =~ $filter;
+ }
+ }
+ else
+ {
+ while( s/(?:^|\s+)$filter(?:\s+|$)// )
+ {
+ push @vers, [
+ unmangle( $1, $opts->{uversionmangle} ),
+ $url,
+ ];
+ }
+ }
+ }
+ }
+ else
+ {
+ return "bad watch URL $wline";
+ }
+ }
+
+ @vers = sort { cmp_ver($a->[0],$b->[0]) } @vers;
+
+ my $ver = $vers[-1];
+ my $url;
+
+ ($ver, $url) = $ver ? @$ver : (undef, undef);
+
+ return wantarray ? ($ver, $url) : $ver;
+}
+
+# returns array of [ver, path]
+sub cpan_versions($$$)
+{
+ my($where, $wline, $opts) = @_;
+
+ my( $key, $filter );
+ # watch line is either:
+ # path/pattern
+ # or
+ # path pattern
+ my @elements = split(/\s+/, $wline);
+ # ignore version and script for version=2 watchlines
+ # (consider the first element only unless the second contains a capture)
+ @elements = $elements[0] if $elements[1] and $elements[1] !~ m{\(};
+ if( @elements == 1 )
+ { # "path/pattern"
+ $wline =~ m{
+ ^(\S*?) # some/path - captured
+ # non-greedy to not eat up the pattern
+ / # delimiter - '/'
+ ([^\s/]+) # the search pattern - no spaces, no slashes - captured
+ (?!.*\() # not followed by search pattern
+ }ix
+ and
+ ( $key, $filter ) = ($1, $2)
+ or
+ die "Strange one-element watchline '$wline'";
+ }
+ else
+ { # "path" "pattern" "other things" (ignored)
+ ( $key, $filter ) = @elements[0..1];
+
+ # could this be a dist search?
+ if ( $key =~ m{^http://search.cpan.org/dist/([^/]+)/$} )
+ {
+ $key = $1;
+ $filter =~ s{^.*/}{}; # remove prepended paths
+ }
+ else
+ {
+ # remove trailing slash (if present)
+ $key =~ s{/$}{};
+ }
+ }
+
+ debugmsg( sprintf( " module search %s %s\n", $key, $filter ) );
+
+ my $list = $where->{$key};
+ unless($list)
+ {
+ debugmsg("directory $key not found (from $wline) [".__LINE__."]\n");
+ return();
+ }
+
+ my @vers;
+ foreach(@$list)
+ {
+ if( $_ =~ $filter )
+ {
+ debugmsg(" looking at $_\n") if 0;
+ my $ver = unmangle( $1, $opts->{uversionmangle} );
+ push @vers, [$ver, $key];
+ }
+ }
+
+ return @vers;
+}
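+
+# Illustrative example (never executed; author ID, dist name and versions are
+# made up): for a by-author watch line whose CPAN prefix has already been
+# stripped by the caller, e.g.
+#   F/FO/FOOBAR/Foo-Bar-(\d[\d.]+)\.tar\.gz
+# the code above extracts $key = 'F/FO/FOOBAR' and
+# $filter = 'Foo-Bar-(\d[\d.]+)\.tar\.gz', then matches the filter against
+# every file name recorded under that key.
+if (0)
+{
+ my @hits = cpan_versions(
+ { 'F/FO/FOOBAR' => [ 'Foo-Bar-0.01.tar.gz', 'Foo-Bar-0.02.tar.gz' ] },
+ 'F/FO/FOOBAR/Foo-Bar-(\d[\d.]+)\.tar\.gz',
+ {},
+ );
+ # @hits is then ( [ '0.01', 'F/FO/FOOBAR' ], [ '0.02', 'F/FO/FOOBAR' ] )
+}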
+
+# returns (version, URL)
+sub latest_upstream_from_cpan($$$$)
+{
+ my ($watch, $cpauth, $cpmod, $cpdist) = @_;
+
+ my @cpan = grep( $_->[0] =~ m{(?:^|\s)(?:http|ftp)://\S*cpan}i, @$watch );
+
+ return undef unless @cpan;
+
+ my @vers;
+
+ foreach(@cpan)
+ {
+ my( $wline, $opts ) = @$_;
+ if( $wline =~ s{^(?:http|ftp)://\S*cpan\S*/modules/by-module/}{}i )
+ {
+ # lookup by module
+ push @vers, map(
+ [ $_->[0], "http://www.cpan.org/modules/by-module/".$_->[1] ],
+ cpan_versions($cpmod, $wline, $opts),
+ );
+ }
+ elsif( $wline =~ s{^(?:http|ftp)://\S*cpan\S*/authors/(?:by-)?id/}{}i
+ or
+ $wline =~ s{^(?:http|ftp)://\S*cpan\S*/(?:by-)?authors/id/}{}i
+ )
+ {
+ # lookup by author
+ push @vers, map(
+ [ $_->[0], "http://www.cpan.org/authors/id/".$_->[1] ],
+ cpan_versions($cpauth, $wline, $opts),
+ );
+ }
+ elsif( $wline =~ m{(?:http|ftp)://search.cpan.org/dist/([^/]+)/?\s} )
+ {
+ # lookup by dist
+ my $dist = $1;
+ push @vers, map(
+ [ $_->[0], "http://search.cpan.org/dist/$dist/" ],
+ cpan_versions($cpdist, $wline, $opts),
+ );
+ }
+ else
+ {
+ debugmsg( sprintf( " can't determine type of search for %s\n", $wline ) );
+ return undef;
+ }
+ }
+
+ @vers = sort { cmp_ver($a->[0],$b->[0]) } @vers;
+
+ my $ver = $vers[-1];
+ my $url;
+ if( $ver )
+ {
+ ($ver, $url) = @$ver;
+ }
+ else
+ {
+ undef($ver); undef($url);
+ }
+
+ return wantarray ? ($ver, $url) : $ver;
+}
+
+sub unmangle_debian_version($$)
+{
+ my($ver, $watch) = @_;
+
+ foreach( @$watch )
+ {
+ my $dvm = $_->[1]->{dversionmangle} if $_->[1];
+ $dvm ||= [];
+
+ do {
+ eval "\$ver =~ $_";
+ die "\$ver =~ $dvm -> $@" if $@;
+ } foreach @$dvm;
+ }
+
+ return $ver;
+}
+
+sub read_changelog ($) {
+ my( $dir ) = @_;
+ debugmsg("Retrieving changelog for $dir\n" );
+
+ my $changelog;
+ my $svn_error;
+ {
+ my $changelog_fh = IO::Scalar->new( \$changelog );
+ local $SVN::Error::handler = undef;
+ ($svn_error) = $svn->cat(
+ $changelog_fh,
+ "$SVN_REPO/trunk/$dir/debian/changelog",
+ 'HEAD',
+ );
+ }
+ if(SVN::Error::is_error($svn_error))
+ {
+ if($svn_error->apr_err() == $SVN::Error::FS_NOT_FOUND)
+ {
+ $svn_error->clear();
+ }
+ else
+ {
+ SVN::Error::croak_on_error($svn_error);
+ }
+ }
+ if(! $changelog) {
+ return { chl_ver => "Missing changelog" };
+ }
+
+ my @chl = Parse::DebianChangelog->init({instring=>$changelog})->data;
+ my @validchl = grep({ $_->Distribution eq 'unstable' and
+ $_->Changes !~ /NOT RELEASED/ } @chl);
+ my($chl, $ver);
+ if(@validchl) {
+ $chl = shift @validchl;
+ $ver = $chl->Version;
+ } elsif(@chl) {
+ $chl = shift @chl;
+ $ver = "Unreleased";
+ } else {
+ return { chl_ver => "Invalid changelog" };
+ }
+ return {
+ chl_ver => $ver,
+ chl_changer => $chl->Maintainer,
+ chl_date => $chl->Date,
+ chl_pkg => $chl->Source,
+ chl_native => scalar($chl->Version !~ /-./)
+ };
+}
+sub read_watch ($) {
+ my( $dir ) = @_;
+ debugmsg("Retrieving watch for $dir\n" );
+
+ my $svn_error;
+ my $watch;
+ {
+ my $watch_io = IO::Scalar->new(\$watch);
+ local $SVN::Error::handler = undef;
+ ($svn_error) = $svn->cat(
+ $watch_io,
+ "$SVN_REPO/trunk/$dir/debian/watch",
+ 'HEAD',
+ );
+ $watch_io->close();
+ }
+ if(SVN::Error::is_error($svn_error))
+ {
+ if($svn_error->apr_err() == $SVN::Error::FS_NOT_FOUND)
+ {
+ $svn_error->clear();
+ }
+ else
+ {
+ SVN::Error::croak_on_error($svn_error);
+ }
+ }
+ if( not $watch) {
+ return 'missing';
+ }
+
+ $watch =~ s/\\\n//gs;
+ my @watch_lines = split(/\n/, $watch) if $watch;
+ @watch_lines = grep( (!/^#/ and !/^version=/ and !/^\s*$/), @watch_lines );
+
+ my @watch;
+ foreach(@watch_lines)
+ {
+ debugmsg( " watch line $_\n" ) if 0;
+ # the opts value either contains no spaces or is enclosed in double quotes
+ my $opts = $1 if s!^\s*opts="([^"]*)"\s+!! or s!^\s*opts=(\S*)\s+!!;
+ debugmsg( " watch options = $opts\n" ) if $opts;
+ # multiple options are separated by commas; commas are not allowed within an option value
+ my @opts = split(/\s*,\s*/, $opts) if $opts;
+ my %opts;
+ foreach(@opts)
+ {
+ next if /^(?:active|passive|pasv)$/;
+
+ /([^=]+)=(.*)/;
+ debugmsg( " watch option $1 = $2\n" );
+ if( $1 eq 'versionmangle' )
+ {
+ push @{ $opts{uversionmangle} }, $2;
+ push @{ $opts{dversionmangle} }, $2;
+ }
+ else
+ {
+ push @{ $opts{$1} }, $2;
+ }
+ }
+ s!^http://www.cpan.org/!$CPAN_MIRROR/!;
+ s!^ftp://www.cpan.org/!$CPAN_MIRROR/!;
+ s!^http://backpan.perl.org/authors/!$CPAN_MIRROR/authors/!;
+ s!^http://mirrors.kernel.org/cpan/!$CPAN_MIRROR/!;
+ s!^ftp://mirrors.kernel.org/cpan/!$CPAN_MIRROR/!;
+
+ push @watch, [ $_, \%opts ];
+ }
+
+ if( not @watch )
+ {
+ warn "invalid debian/watch" if 0;
+ return 'invalid';
+ }
+ debugmsg('Found valid debian/watch') if 0;
+ return ( 'valid', @watch );
+}
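+
+# Illustrative example (never executed; the package, dist and pattern are
+# made up): for a debian/watch containing the single line
+#   opts="uversionmangle=s/-rc/~rc/" http://search.cpan.org/dist/Foo-Bar/ .*/Foo-Bar-v?(\d[\d.]+)\.tar\.gz
+# read_watch() returns
+#   ( 'valid',
+#     [ 'http://search.cpan.org/dist/Foo-Bar/ .*/Foo-Bar-v?(\d[\d.]+)\.tar\.gz',
+#       { uversionmangle => [ 's/-rc/~rc/' ] } ] )
+# and each element of the list handed to latest_upstream_from_cpan() and
+# latest_upstream_from_watch() above has this [ watch_line, \%opts ] form.
+if (0)
+{
+ my( $status, @parsed ) = read_watch('libfoo-bar-perl');
+ debugmsg("watch status for libfoo-bar-perl: $status\n");
+}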
+
+my $header = <<_EOF;
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+ "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+ <title>pkg-perl package versions</title>
+ <style type="text/css">
+ body {
+ background: white;
+ color: black;
+ }
+ table {
+ border: 1px solid black;
+ border-collapse: collapse;
+ empty-cells: show;
+ }
+ td, th {
+ border: 1px solid black;
+ }
+ .upload {
+ background: lightsalmon;
+ }
+ .upgrade {
+ background: lightblue;
+ }
+ </style>
+</head>
+<body>
+<table>
+<tr>
+<td>
+<table>
+<tr><th>Legend</th></tr>
+<tr><td class="upload">Needs uploading</td></tr>
+<tr><td class="upgrade">Needs upgrade from upstream</td></tr>
+</table>
+</td>
+<td>
+ <a href="http://pkg-perl.alioth.debian.org/">http://pkg-perl.alioth.debian.org</a>
+</td>
+</tr>
+</table>
+
+<br>
+
+<table>
+<tr>
+ <th>Package</th>
+ <th>Repository</th>
+ <th>Archive</th>
+ <th>upstream</th>
+</tr>
+_EOF
+
+my $total = 0;
+my $total_shown = 0;
+my $chunk;
+
+# loop over packages
+my @svn_packages = sort(keys(%{$svn->ls("$SVN_REPO/trunk", 'HEAD', 0)}));
+my $cur_ver;
+$svn->info("$SVN_REPO/trunk", undef, "HEAD", sub {
+ $cur_ver = $_[1]->rev();
+ }, 0);
+
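+# %maindata caches per-package results between runs (written via to_cache()
+# as "maindata" under $CACHEDIR). Each key is a package directory name in
+# trunk/ and maps to a hash of chl_* fields (from debian/changelog, see
+# read_changelog) and watch_* fields (from debian/watch, see read_watch).
+# The special key "//lastrev" records the repository revision last scanned,
+# so only packages whose changelog or watch file changed since then (plus
+# any forced with --rescan) are re-read on the next run.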
+my %maindata;
+my(@wmodified, @cmodified);
+if(not $force_rescan and from_cache(\%maindata, "maindata", 168)) { # 1 week
+ if($maindata{packages}) {
+ debugmsg("Converting maindata hash\n");
+ my %md;
+ $md{"//lastrev"} = $maindata{lastrev};
+ $md{$_} = $maindata{packages}{$_} foreach(
+ keys %{$maindata{packages}});
+ %maindata = %md;
+ }
+ $svn->log( ["$SVN_REPO/trunk"], $maindata{"//lastrev"}, "HEAD", 1, 1, sub {
+ return if($_[1] <= $maindata{"//lastrev"});
+ debugmsg("Scanning changes from revision $_[1]:\n");
+ foreach(keys %{$_[0]}) {
+ debugmsg("- $_\n");
+ if(m{^/?trunk/([^/]+)/debian/(changelog|watch)$}) {
+ if($2 eq "changelog") {
+ push @cmodified, $1;
+ } else {
+ push @wmodified, $1;
+ }
+ }
+ }
+ }
+ );
+}
+$maindata{"//lastrev"} = $cur_ver;
+foreach(@pkg_rescan) { # forced rescan of packages
+ push @wmodified, $_;
+ push @cmodified, $_;
+}
+foreach(@svn_packages) {
+ next if($maindata{$_});
+ $maindata{$_} = {};
+ push @wmodified, $_;
+ push @cmodified, $_;
+}
+my %tmp = map({ $_ => 1 } @cmodified); # eliminate dupes
+foreach my $pkg (keys %tmp) {
+ $maindata{$pkg} ||= {};
+ foreach(keys %{$maindata{$pkg}}) {
+ delete $maindata{$pkg}{$_} if(/^chl_/);
+ }
+ my $data = read_changelog($pkg);
+ foreach(keys %$data) {
+ $maindata{$pkg}{$_} = $data->{$_};
+ }
+ delete $maindata{$pkg}{watch_unmangled_ver};
+ if($maindata{$pkg}{chl_ver} and $maindata{$pkg}{watch}) {
+ my $up_svn = $maindata{$pkg}{chl_ver};
+ $up_svn =~ s/^(?:\d+:)?(.+?)(?:-[^-]+)?$/$1/;
+ $up_svn = unmangle_debian_version($up_svn, $maindata{$pkg}{watch});
+ $maindata{$pkg}{watch_unmangled_ver} = $up_svn;
+ }
+}
+if($cpan_updated) {
+ push @wmodified, grep(
+ { $maindata{$_}{watch_cpan} }
+ @svn_packages );
+}
+%tmp = map({ $_ => 1 } @wmodified); # eliminate dupes
+foreach(keys %tmp) {
+ my $pkg = $maindata{$_};
+ my($st, @data) = read_watch($_);
+ debugmsg("$_: $st ".scalar(@data)) if 0;
+ foreach(keys %{$pkg}) {
+ delete $pkg->{$_} if(/^watch_/);
+ }
+ $pkg->{watch_url} = "";
+ $pkg->{watch_ver} = "";
+ $pkg->{watch_unmangled_ver} = $pkg->{chl_ver};
+ unless($st eq "valid") {
+ if($st eq "missing" and $pkg->{chl_native}) {
+ $pkg->{watch_ver} = $pkg->{chl_ver};
+ } elsif($st eq "invalid") {
+ $pkg->{watch_ver} = "Invalid debian/watch";
+ $pkg->{watch_url} = qq(http://svn.debian.org/wsvn/pkg-perl/trunk/$_/debian/watch?op=file&rev=0&sc=0);
+ } else { # missing
+ $pkg->{watch_ver} = "Missing debian/watch";
+ }
+ next;
+ }
+ $pkg->{watch} = \@data;
+
+ my($upstream_ver, $upstream_url) = latest_upstream_from_cpan(\@data,
+ \%cpan_authors, \%cpan_modules, \%cpan_dists);
+ if( $upstream_ver ) {
+ $pkg->{watch_cpan} = 1;
+ } else {
+ ($upstream_ver, $upstream_url) = latest_upstream_from_watch(\@data);
+ }
+ if( $upstream_ver ) {
+ $pkg->{watch_ver} = $upstream_ver;
+ $pkg->{watch_url} = $upstream_url || "";
+ } else {
+ $pkg->{watch_ver} = "Invalid debian/watch";
+ $pkg->{watch_url} = qq(http://svn.debian.org/wsvn/pkg-perl/trunk/$_/debian/watch?op=file&rev=0&sc=0);
+ }
+ if($pkg->{chl_ver}) {
+ my $up_svn = $pkg->{chl_ver};
+ $up_svn =~ s/^(?:\d+:)?(.+?)(?:-[^-]+)?$/$1/;
+ $up_svn = unmangle_debian_version($up_svn, \@data);
+ $pkg->{watch_unmangled_ver} = $up_svn;
+ }
+}
+to_cache(\%maindata, "maindata");
+
+my @pkgs_to_check;
+my $showalways;
+if( @ARGV )
+{
+ @pkgs_to_check = @ARGV;
+ $showalways = 1;
+}
+else
+{
+ debugmsg(
+ sprintf(
+ "%d entries in trunk\n",
+ scalar(@svn_packages)
+ ),
+ );
+ @pkgs_to_check = @svn_packages;
+}
+
+print $header;
+foreach ( @pkgs_to_check )
+{
+ $total++;
+
+ my $pkgd = $maindata{$_};
+ my $spkg = $maindata{$_}{chl_pkg} or die "No source package for $_?";
+ debugmsg("Examining $_ (src:$spkg)\n" );
+
+ debugmsg(sprintf(" - Archive has %s\n", $packages{$spkg} || 'none'));
+ debugmsg(sprintf(" - experimental has %s\n",
+ $experimental{$spkg} || 'none'));
+ debugmsg(sprintf(" - stable has %s\n", $stable{$spkg} || 'none'));
+ debugmsg(sprintf(" - oldstable has %s\n", $oldstable{$spkg} || 'none'));
+ debugmsg(sprintf(" - incoming has %s\n", $incoming{$spkg} || 'none' ));
+ debugmsg(sprintf(" - NEW has %s\n", $new{$spkg} || 'none'));
+ debugmsg(sprintf(" - %s has %s (%s)\n",
+ $pkgd->{watch_cpan} ? "CPAN" : "upstream",
+ $pkgd->{watch_ver} || 'none', $pkgd->{watch_url} || 'no url'));
+ debugmsg(sprintf(" - SVN has %s (upstream version=%s)\n",
+ $pkgd->{chl_ver} || 'none', $pkgd->{watch_unmangled_ver} || 'none'));
+
+ next unless($showalways or
+ $pkgd->{watch_unmangled_ver} ne $pkgd->{watch_ver}
+ or
+ (! $packages{$spkg} or $pkgd->{chl_ver} ne $packages{$spkg})
+ and
+ (! $incoming{$spkg} or $pkgd->{chl_ver} ne $incoming{$spkg})
+ and
+ (! $new{$spkg} or $pkgd->{chl_ver} ne $new{$spkg})
+ );
+ $total_shown++;
+ my $text = "<tr>\n";
+ $text .= "<td>".(
+ ($packages{$spkg})
+ ? qq(<a href="http://packages.qa.debian.org/$spkg">$spkg</a>)
+ : qq($spkg)
+ )."</td>\n";
+
+ $text .= "<td".(
+ (! $packages{$spkg} or $pkgd->{chl_ver} ne $packages{$spkg})
+ ? ' class="upload">'
+ : '>');
+ $text .= qq(<a href="http://svn.debian.org/wsvn/pkg-perl/trunk/$_/debian/changelog?op=file&rev=0&sc=0" title=") . $pkgd->{chl_changer} . "\n" . $pkgd->{chl_date} . "\">" .$pkgd->{chl_ver} . "</a></td>\n";
+
+ my $archive_text = join(
+ "\n",
+ $packages{$spkg}||(),
+ (
+ ($incoming{$spkg})
+ ? "Incoming: $incoming{$spkg}"
+ : ()
+ ),
+ (
+ ($new{$spkg})
+ ? "NEW: $new{$spkg}"
+ : ()
+ ),
+ (
+ ($experimental{$spkg})
+ ? "experimental: $experimental{$spkg}"
+ : ()
+ ),
+ (
+ ($stable{$spkg} and not $packages{$spkg} and not $experimental{$spkg})
+ ? "stable: $stable{$spkg}"
+ : ()
+ ),
+ (
+ ($oldstable{$spkg} and not $stable{$spkg} and not $packages{$spkg} and not $experimental{$spkg})
+ ? "oldstable: $oldstable{$spkg}"
+ : ()
+ ),
+ );
+
+ $archive_text = qq(<a href="http://packages.qa.debian.org/$spkg">$archive_text</a> [<a style="font-size:smaller" href="http://bugs.debian.org/src:$spkg">BTS</a>]) if $packages{$spkg} or $experimental{$spkg} or $stable{$spkg} or $oldstable{$spkg};
+
+ $text .= "<td>$archive_text</td>\n";
+
+ my $upstream_text = (
+ $pkgd->{watch_cpan} ? "CPAN: " : "") . $pkgd->{watch_ver};
+ $upstream_text = qq(<a href=") . $pkgd->{watch_url} . qq(">$upstream_text</a>) if $pkgd->{watch_url};
+
+ $text .= (
+ ($pkgd->{watch_unmangled_ver} ne $pkgd->{watch_ver})
+ ? qq(<td class="upgrade">$upstream_text</td>\n)
+ : "<td></td>\n"
+ );
+ $text .= "</tr>\n";
+ print $text;
+}
+
+my $date = gmtime;
+my $footer = <<_EOF;
+<tr><td colspan=\"4\"><b>TOTAL: $total_shown/$total</b></td></tr>
+</table>
+<hr>
+$date UTC<br>
+<i>$THIS_REVISION</i>
+</body>
+_EOF
+
+print $footer;
+
+unlink $lockfile or die $!;
+
+# vim: et:sts=4:ai:sw=4
Property changes on: trunk/community/qa/oldscripts/versioncheck2.pl
___________________________________________________________________
Name: svn:executable
+ *
Added: trunk/community/qa/oldscripts/versioncheck3.pl
===================================================================
--- trunk/community/qa/oldscripts/versioncheck3.pl (rev 0)
+++ trunk/community/qa/oldscripts/versioncheck3.pl 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,1102 @@
+#!/usr/bin/perl -w
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+
+### TODO ###
+#
+# Try harder to use 02packages.details.gz for authoritative CPAN
+# version source, regardless of whether debian/watch uses by-module URL
+# or by-author one
+#
+# Use AptPkg::Version for
+# - version comparison
+# - stripping the Debian revision from a version
+
+our $THIS_REVISION = '$Id: versioncheck3.pl 8974 2007-11-07 15:28:29Z gregoa-guest $';
+
+BEGIN {
+ my $self_dir = $0;
+ $self_dir =~ s{/[^/]+$}{};
+ unshift @INC, $self_dir;
+};
+
+use strict;
+use Carp qw(confess);
+use Common;
+use LWP::Simple ();
+use LWP::UserAgent;
+use Compress::Zlib ();
+use HTML::TableExtract;
+use SVN::Client;
+use SVN::Core;
+use IO::Scalar;
+use Parse::DebianChangelog;
+use Getopt::Long;
+use File::Path;
+use SOAP::Lite;
+
+our $opt_debug = 0;
+my $force_cpan = 0;
+my $force_rescan = 0;
+my @pkg_rescan = ();
+our $CACHEDIR = "$ENV{HOME}/.dpg/versioncheck";
+our $svn = SVN::Client->new();
+
+GetOptions(
+ 'debug!' => \$opt_debug,
+ 'force-cpan!' => \$force_cpan,
+ 'force-rescan!' => \$force_rescan,
+ 'rescan=s' => \@pkg_rescan,
+ 'cache-dir=s' => \$CACHEDIR
+);
+
+sub debugmsg(@)
+{
+ warn @_ if $opt_debug;
+};
+
+mkpath $CACHEDIR;
+my $lockfile = "$CACHEDIR/.lock";
+if(-e $lockfile) {
+ if(-M $lockfile > 1/24) { # 1 hour
+ debugmsg("Stale lock file -- deleting\n");
+ unlink $lockfile or die $!;
+ } else {
+ die("Other instance of $0 is running!\n");
+ }
+}
+$SIG{HUP} = $SIG{INT} = $SIG{QUIT} = \&sighandler;
+$SIG{SEGV} = $SIG{PIPE} = $SIG{TERM} = \&sighandler;
+$SIG{__DIE__} = \&diehandler;
+open(LOCK, ">", $lockfile) or die $!;
+close(LOCK) or die $!;
+
+# Get some information globally
+
+use Storable();
+use LWP::UserAgent;
+
+debugmsg( "CPAN mirror is $CPAN_MIRROR\n" );
+debugmsg( "The cache is in $CACHEDIR\n" );
+
+sub diehandler
+{
+ die @_ if($^S); # eval
+ debugmsg("Removing lockfile...\n");
+ unlink $lockfile;
+ die @_;
+}
+sub sighandler
+{
+ my $sig = shift;
+ warn "Caught $sig signal...\n";
+ debugmsg("Removing lockfile...\n");
+ unlink $lockfile;
+ # signal myself again
+ $SIG{$sig} = "DEFAULT";
+ kill $sig, $$;
+}
+sub from_cache($$$)
+{
+ my( $ref, $name, $max_age) = @_;
+
+ my $dir = $CACHEDIR;
+
+ return undef unless -f "$dir/$name" and -M(_) <= $max_age/24;
+
+ my $data = Storable::retrieve("$dir/$name");
+ return undef unless $data;
+
+ debugmsg("$name loaded from cache (".scalar(keys(%$data)).")\n");
+
+ %$ref = %$data;
+ return 1;
+}
+
+sub to_cache($$)
+{
+ my( $ref, $name) = @_;
+
+ Storable::store($ref, "$CACHEDIR/$name");
+}
+
+sub scan_packages($$)
+{
+ my( $suite, $hash ) = @_;
+ foreach my $section ( qw( main contrib non-free ) )
+ {
+ # TODO This is somewhat brute-force: it reads the whole Sources.gz into
+ # memory and then decompresses it in memory as well. It should be made
+ # incremental, using a reasonably-sized buffer.
+ my $url = "$MIRROR/debian/dists/$suite/$section/source/Sources.gz";
+ my $sources_gz = LWP::Simple::get($url);
+ unless($sources_gz) {
+ warn "Can't download $url";
+ return 0;
+ }
+ my $sources = Compress::Zlib::memGunzip(\$sources_gz);
+ my $src_io = IO::Scalar->new(\$sources);
+
+ my $pkg;
+ while( <$src_io> )
+ {
+ chomp;
+ if( s/^Package: // )
+ {
+ $pkg = $_;
+ next;
+ }
+
+ if( s/^Version: // )
+ {
+ $hash->{$pkg} = $_;
+ }
+ }
+ }
+
+ debugmsg(
+ sprintf(
+ "Information about %d %s packages loaded\n",
+ scalar(keys(%$hash)),
+ $suite,
+ ),
+ );
+ to_cache($hash, $suite);
+ 1;
+}
+
+my %packages; # contains {package => version} pairs
+unless(from_cache(\%packages, 'unstable', 6)) {
+ scan_packages('unstable', \%packages)
+ or from_cache(\%packages, 'unstable', 999) or die;
+}
+
+my %experimental; # contains {package => version} pairs
+unless(from_cache(\%experimental, 'experimental', 6)) {
+ scan_packages('experimental', \%experimental)
+ or from_cache(\%experimental, 'experimental', 999) or die;
+}
+
+my %stable; # contains {package => version} pairs
+unless(from_cache(\%stable, 'stable', 168)) {
+ scan_packages('stable', \%stable)
+ or from_cache(\%stable, 'stable', 999) or die;
+}
+
+my %oldstable; # contains {package => version} pairs
+unless(from_cache(\%oldstable, 'oldstable', 168)) {
+ scan_packages('oldstable', \%oldstable)
+ or from_cache(\%oldstable, 'oldstable', 999) or die;
+}
+
+my %incoming; # contains {package => version} pairs
+unless(from_cache(\%incoming, 'incoming', 1)) {
+ scan_incoming(\%incoming)
+ or from_cache(\%incoming, 'incoming', 999) or die;
+}
+
+my %new; # contains {package => version} pairs
+unless(from_cache(\%new, 'new', 1)) {
+ scan_new(\%new)
+ or from_cache(\%new, 'new', 999) or die;
+}
+
+my %bugs; # contains {package => bugcount} pairs
+unless(from_cache(\%bugs, 'bugs', 6)) {
+ scan_bugs(\%bugs)
+ or from_cache(\%bugs, 'bugs', 999) or die;
+}
+
+my( %cpan_authors, %cpan_modules, %cpan_dists, $cpan_updated );
+unless(not $force_cpan
+ and from_cache(\%cpan_authors, 'cpan_authors', 12)
+ and from_cache(\%cpan_modules, 'cpan_modules', 12)
+ and from_cache(\%cpan_dists, 'cpan_dists', 12))
+{
+ if(scan_cpan(\%cpan_authors, \%cpan_modules, \%cpan_dists)) {
+ $cpan_updated = 1;
+ } else {
+ from_cache(\%cpan_authors, 'cpan_authors', 999) or die;
+ from_cache(\%cpan_modules, 'cpan_modules', 999) or die;
+ from_cache(\%cpan_dists, 'cpan_dists', 999) or die;
+ }
+}
+
+sub scan_incoming {
+ my $inchash = shift;
+ my $ua = new LWP::UserAgent;
+ $ua->timeout(10);
+ my $res = $ua->get('http://incoming.debian.org');
+ return 0 unless $res->is_success;
+ my $incoming = $res->content();
+ my $inc_io = IO::Scalar->new(\$incoming);
+ while( <$inc_io> )
+ {
+ chomp;
+ next unless /a href="([^_]+)_(.+)\.dsc"/;
+
+ $inchash->{$1} = $2;
+ }
+ to_cache($inchash, "incoming");
+ debugmsg( sprintf("Information about %d incoming packages loaded\n",
+ scalar(keys(%$inchash))) );
+};
+
+sub scan_new {
+ my $newhash = shift;
+ my $ua = new LWP::UserAgent;
+ $ua->timeout(10);
+ my $res = $ua->get('http://ftp-master.debian.org/new.html');
+ return 0 unless $res->is_success;
+ my $new = $res->content();
+ my $te = HTML::TableExtract->new(
+ headers=> [
+ qw(Package Version Arch Distribution Age Maintainer Closes)
+ ],
+ );
+ $te->parse($new);
+ foreach my $table( $te->tables )
+ {
+ foreach my $row( $table->rows )
+ {
+ next unless $row->[2] =~ /source/;
+
+ my @versions = split(/\n/, $row->[1]);
+ s/<br>// foreach @versions;
+
+ $newhash->{$row->[0]} = $versions[-1];# use the last uploaded version
+ }
+ }
+ to_cache($newhash, "new");
+ debugmsg( sprintf("Information about %d NEW packages loaded\n",
+ scalar(keys(%$newhash))) );
+}
+
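+# Count open bugs per package by querying the Debian BTS SOAP interface:
+# get_bugs() returns the numbers of all bugs filed against the group's
+# maintainer address, get_status() fetches their details, and every bug not
+# marked "done" increments the counter for its package.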
+sub scan_bugs {
+ my $bughash = shift;
+
+ my $soap = SOAP::Lite->uri('Debbugs/SOAP')->proxy('http://bugs.debian.org/cgi-bin/soap.cgi');
+ my $pkgperlbugs = $soap->get_status($soap->get_bugs(maint=>'pkg-perl-maintainers at lists.alioth.debian.org')->result())->result;
+ foreach my $bug(keys %$pkgperlbugs)
+ {
+ my $pkgname = $pkgperlbugs->{$bug}->{package};
+ my $done = $pkgperlbugs->{$bug}->{done};
+ $bughash->{$pkgname}++ unless $done;
+ }
+
+ to_cache($bughash, "bugs");
+ debugmsg( sprintf("Information about bugs for %d packages loaded\n",
+ scalar(keys(%$bughash))) );
+}
+
+sub scan_cpan {
+ my( $cpauth, $cpmod, $cpdist ) = @_;
+ open(TMP, '+>', undef) or die "Unable to open anonymous temporary file";
+ my $old = select(TMP);
+ my $lslr = LWP::Simple::getprint("$CPAN_MIRROR/ls-lR.gz");
+ unless(-s TMP) {
+ close TMP;
+ return 0;
+ }
+ select($old);
+ seek(TMP, 0, 0);
+ my $gz = Compress::Zlib::gzopen(\*TMP, 'rb') or die $Compress::Zlib::gzerrno;
+
+ my $storage;
+ my ($section, $path);
+ while( $gz->gzreadline($_) )
+ {
+ chomp;
+ next unless $_;
+
+ # catch dist
+ if( m{
+ \s # blank
+ ( # $1 will capture the whole file name
+ (\S+?) # dist name - in $2
+ - # separator - dash
+ v? # optional 'v' before the version
+ (?: # version
+ \d # starts with a digit
+ [\d._]+ # followed by digits, periods and underscores
+ )
+ (?: # file extension
+ \.tar # .tar
+ (?:\.gz)? # most probably followed by .gz
+ | \.zip # yeah, that ugly OS is not wiped yet
+ )
+ )$}x # and this finishes the line
+ )
+ {
+ $cpdist->{$2} ||= [];
+ push @{ $cpdist->{$2} }, $1;
+ }
+
+ if( m{^\./authors/id/(.+):} )
+ {
+ $storage = $cpauth->{$1} ||= [];
+ }
+ elsif( m{^\./modules/by-module/(.+):} )
+ {
+ $storage = $cpmod->{$1} ||= [];
+ }
+ elsif( m{\..*:} )
+ {
+ undef($storage);
+ }
+ else
+ {
+ next unless $storage;
+
+ my(
+ $perm, $ln, $o, $g, $size, $month, $day, $time, $what, $where,
+ ) = split(/\s+/);
+
+ next unless $what and $what =~ /(?:tar\.gz|\.tgz|\.zip|\.tar\.bz2|\.tbz)$/;
+
+ push @$storage, $what;
+ }
+ }
+ close(TMP);
+ to_cache($cpauth, 'cpan_authors');
+ to_cache($cpmod, 'cpan_modules');
+ to_cache($cpdist, 'cpan_dists' );
+ 1;
+}
+
+# RETURNS
+# 1 if first version is bigger
+# 0 if both versions are equal
+# -1 if second version is bigger
+sub cmp_ver($$)
+{
+ my($a,$b) = @_;
+
+ while( $a and $b )
+ {
+ $a =~ s/^(\w*)//; my $a_w = $1||'';
+ $b =~ s/^(\w*)//; my $b_w = $1||'';
+
+ my $r = $a_w cmp $b_w;
+
+ return $r if $r;
+
+ $a =~ s/^(\d*)//; my $a_d = (defined($1) and $1 ne '') ? $1 : -1;
+ $b =~ s/^(\d*)//; my $b_d = (defined($1) and $1 ne '') ? $1 : -1;
+
+ $r = $a_d <=> $b_d;
+
+ return $r if $r;
+
+ $a =~ s/^(\D*)//; my $a_nd = $1||'';
+ $b =~ s/^(\D*)//; my $b_nd = $1||'';
+
+ $r = $a_nd cmp $b_nd;
+
+ return $r if $r;
+ }
+ return 1 if $a;
+ return -1 if $b;
+ return 0;
+}
+
+sub unmangle( $ $ )
+{
+ my( $ver, $mangles ) = @_;
+
+ return $ver unless $mangles;
+
+ my @vms = map( split(/;/, $_), @$mangles );
+
+ foreach my $vm( @vms )
+ {
+ eval "\$ver =~ $vm";
+ die "<<\$_ =~ $vm>> $@" if $@;
+ debugmsg(" mangled: $ver\n");
+ }
+
+ return $ver;
+}
+
+# RETURNS undef if all watch files point to CPAN
+sub latest_upstream_from_watch($)
+{
+ my ($watch) = @_;
+
+ my @vers;
+
+ foreach(@$watch)
+ {
+ my( $wline, $opts ) = @$_;
+
+ $wline =~ m{^((?:http|ftp)://\S+)/};
+ my $url = $1 or confess "Invalid watch line given? '$wline'";
+ $url =~ s{^http://sf.net/}{http://sf.net/projects/};
+
+ $wline =~ s{^http://sf\.net/(\S+)}{http://qa.debian.org/watch/sf.php/$1};
+
+ my @items = split(/\s+/, $wline);
+
+ my( $dir, $filter );
+
+ # Either we have single URL/pattern
+ # or URL/pattern + extra
+ if( $items[0] =~ /\(/ )
+ {
+ # Since '+' is greedy, the second capture has no slashes
+ ($dir, $filter) = $items[0] =~ m{^(.+)/(.+)$};
+ }
+ # or, we have a homepage plus pattern
+ # (plus optional other non-interesting stuff)
+ elsif( @items >= 2 and $items[1] =~ /\(/ )
+ {
+ ($dir, $filter) = @items[0,1];
+ }
+
+ if( $dir and $filter )
+ {
+ debugmsg( " uscan $dir $filter\n" );
+ $url ||= $dir;
+ my $page = LWP::Simple::get($dir) or return "Unable to get $dir (".__LINE__.")";
+ my $page_io = IO::Scalar->new(\$page);
+ while( <$page_io> )
+ {
+ warn $_ if 0;
+
+ if( $dir =~ /^http/ )
+ {
+ while( s/<a [^>]*href="([^"]+)"[^>]*>//i )
+ {
+ my $href = $1;
+ push @vers, [
+ unmangle( $1, $opts->{uversionmangle} ),
+ $url,
+ ] if $href =~ $filter;
+ }
+ }
+ else
+ {
+ while( s/(?:^|\s+)$filter(?:\s+|$)// )
+ {
+ push @vers, [
+ unmangle( $1, $opts->{uversionmangle} ),
+ $url,
+ ];
+ }
+ }
+ }
+ }
+ else
+ {
+ return "bad watch URL $wline";
+ }
+ }
+
+ @vers = sort { cmp_ver($a->[0],$b->[0]) } @vers;
+
+ my $ver = $vers[-1] || '';
+ my $url;
+
+ ($ver, $url) = $ver ? @$ver : (undef, undef);
+
+ return wantarray ? ($ver, $url) : $ver;
+}
+
+# returns array of [ver, path]
+sub cpan_versions($$$)
+{
+ my($where, $wline, $opts) = @_;
+
+ my( $key, $filter );
+ # watch line is either:
+ # path/pattern
+ # or
+ # path pattern
+ my @elements = split(/\s+/, $wline);
+ # ignore version and script for version=2 watchlines
+ # (consider the first element only unless the second contains a capture)
+ @elements = $elements[0] if $elements[1] and $elements[1] !~ m{\(};
+ if( @elements == 1 )
+ { # "path/pattern"
+ $wline =~ m{
+ ^(\S*?) # some/path - captured
+ # non-greedy to not eat up the pattern
+ / # delimiter - '/'
+ ([^\s/]+) # the search pattern - no spaces, no slashes - captured
+ (?!.*\() # not followed by search pattern
+ }ix
+ and
+ ( $key, $filter ) = ($1, $2)
+ or
+ die "Strange one-element watchline '$wline'";
+ }
+ else
+ { # "path" "pattern" "other things" (ignored)
+ ( $key, $filter ) = @elements[0..1];
+
+ # could this be a dist search?
+ if ( $key =~ m{^http://search.cpan.org/dist/([^/]+)/$} )
+ {
+ $key = $1;
+ $filter =~ s{^.*/}{}; # remove prepended paths
+ }
+ else
+ {
+ # remove trailing slash (if present)
+ $key =~ s{/$}{};
+ }
+ }
+
+ debugmsg( sprintf( " module search %s %s\n", $key, $filter ) );
+
+ my $list = $where->{$key};
+ unless($list)
+ {
+ debugmsg("directory $key not found (from $wline) [".__LINE__."]\n");
+ return();
+ }
+
+ my @vers;
+ foreach(@$list)
+ {
+ if( $_ =~ $filter )
+ {
+ debugmsg(" looking at $_\n") if 0;
+ my $ver = unmangle( $1, $opts->{uversionmangle} );
+ push @vers, [$ver, $key];
+ }
+ }
+
+ return @vers;
+}
+
+# returns (version, URL)
+sub latest_upstream_from_cpan($$$$)
+{
+ my ($watch, $cpauth, $cpmod, $cpdist) = @_;
+
+ my @cpan = grep( $_->[0] =~ m{(?:^|\s)(?:http|ftp)://\S*cpan}i, @$watch );
+
+ return undef unless @cpan;
+
+ my @vers;
+
+ foreach(@cpan)
+ {
+ my( $wline, $opts ) = @$_;
+ if( $wline =~ s{^(?:http|ftp)://\S*cpan\S*/modules/by-module/}{}i )
+ {
+ # lookup by module
+ push @vers, map(
+ [ $_->[0], "http://www.cpan.org/modules/by-module/".$_->[1] ],
+ cpan_versions($cpmod, $wline, $opts),
+ );
+ }
+ elsif( $wline =~ s{^(?:http|ftp)://\S*cpan\S*/authors/(?:by-)?id/}{}i
+ or
+ $wline =~ s{^(?:http|ftp)://\S*cpan\S*/(?:by-)?authors/id/}{}i
+ )
+ {
+ # lookup by author
+ push @vers, map(
+ [ $_->[0], "http://www.cpan.org/authors/id/".$_->[1] ],
+ cpan_versions($cpauth, $wline, $opts),
+ );
+ }
+ elsif( $wline =~ m{(?:http|ftp)://search.cpan.org/dist/([^/]+)/?\s} )
+ {
+ # lookup by dist
+ my $dist = $1;
+ push @vers, map(
+ [ $_->[0], "http://search.cpan.org/dist/$dist/" ],
+ cpan_versions($cpdist, $wline, $opts),
+ );
+ }
+ else
+ {
+ debugmsg( sprintf( " can't determine type of search for %s\n", $wline ) );
+ return undef;
+ }
+ }
+
+ @vers = sort { cmp_ver($a->[0],$b->[0]) } @vers;
+
+ my $ver = $vers[-1];
+ my $url;
+ if( $ver )
+ {
+ ($ver, $url) = @$ver;
+ }
+ else
+ {
+ undef($ver); undef($url);
+ }
+
+ return wantarray ? ($ver, $url) : $ver;
+}
+
+sub unmangle_debian_version($$)
+{
+ my($ver, $watch) = @_;
+
+ foreach( @$watch )
+ {
+ my $dvm = $_->[1]->{dversionmangle} if $_->[1];
+ $dvm ||= [];
+
+ do {
+ eval "\$ver =~ $_";
+ die "\$ver =~ $dvm -> $@" if $@;
+ } foreach @$dvm;
+ }
+
+ return $ver;
+}
+
+sub read_changelog ($) {
+ my( $dir ) = @_;
+ debugmsg("Retrieving changelog for $dir\n" );
+
+ my $changelog;
+ my $svn_error;
+ {
+ my $changelog_fh = IO::Scalar->new( \$changelog );
+ local $SVN::Error::handler = undef;
+ ($svn_error) = $svn->cat(
+ $changelog_fh,
+ "$SVN_REPO/trunk/$dir/debian/changelog",
+ 'HEAD',
+ );
+ }
+ if(SVN::Error::is_error($svn_error))
+ {
+ if($svn_error->apr_err() == $SVN::Error::FS_NOT_FOUND)
+ {
+ $svn_error->clear();
+ }
+ else
+ {
+ SVN::Error::croak_on_error($svn_error);
+ }
+ }
+ if(! $changelog) {
+ return { chl_ver => "Missing changelog" };
+ }
+
+ my @chl = Parse::DebianChangelog->init({instring=>$changelog})->data;
+ my @validchl = grep({ $_->Distribution eq 'unstable' and
+ $_->Changes !~ /NOT RELEASED/ } @chl);
+ my($chl, $ver);
+ if(@validchl) {
+ $chl = shift @validchl;
+ $ver = $chl->Version;
+ } elsif(@chl) {
+ $chl = shift @chl;
+ $ver = "Unreleased";
+ } else {
+ return { chl_ver => "Invalid changelog" };
+ }
+ return {
+ chl_ver => $ver,
+ chl_changer => $chl->Maintainer,
+ chl_date => $chl->Date,
+ chl_pkg => $chl->Source,
+ chl_native => scalar($chl->Version !~ /-./)
+ };
+}
+sub read_watch ($) {
+ my( $dir ) = @_;
+ debugmsg("Retrieving watch for $dir\n" );
+
+ my $svn_error;
+ my $watch;
+ {
+ my $watch_io = IO::Scalar->new(\$watch);
+ local $SVN::Error::handler = undef;
+ ($svn_error) = $svn->cat(
+ $watch_io,
+ "$SVN_REPO/trunk/$dir/debian/watch",
+ 'HEAD',
+ );
+ $watch_io->close();
+ }
+ if(SVN::Error::is_error($svn_error))
+ {
+ if($svn_error->apr_err() == $SVN::Error::FS_NOT_FOUND)
+ {
+ $svn_error->clear();
+ }
+ else
+ {
+ SVN::Error::croak_on_error($svn_error);
+ }
+ }
+ if( not $watch) {
+ return 'missing';
+ }
+
+ $watch =~ s/\\\n//gs;
+ my @watch_lines = split(/\n/, $watch) if $watch;
+ @watch_lines = grep( (!/^#/ and !/^version=/ and !/^\s*$/), @watch_lines );
+
+ my @watch;
+ foreach(@watch_lines)
+ {
+ debugmsg( " watch line $_\n" ) if 0;
+ # the opts value either contains no spaces or is enclosed in double quotes
+ my $opts = $1 if s!^\s*opts="([^"]*)"\s+!! or s!^\s*opts=(\S*)\s+!!;
+ debugmsg( " watch options = $opts\n" ) if $opts;
+ # multiple options are separated by commas; commas are not allowed within an option value
+ my @opts = split(/\s*,\s*/, $opts) if $opts;
+ my %opts;
+ foreach(@opts)
+ {
+ next if /^(?:active|passive|pasv)$/;
+
+ /([^=]+)=(.*)/;
+ debugmsg( " watch option $1 = $2\n" );
+ if( $1 eq 'versionmangle' )
+ {
+ push @{ $opts{uversionmangle} }, $2;
+ push @{ $opts{dversionmangle} }, $2;
+ }
+ else
+ {
+ push @{ $opts{$1} }, $2;
+ }
+ }
+ s!^http://www.cpan.org/!$CPAN_MIRROR/!;
+ s!^ftp://www.cpan.org/!$CPAN_MIRROR/!;
+ s!^http://backpan.perl.org/authors/!$CPAN_MIRROR/authors/!;
+ s!^http://mirrors.kernel.org/cpan/!$CPAN_MIRROR/!;
+ s!^ftp://mirrors.kernel.org/cpan/!$CPAN_MIRROR/!;
+
+ push @watch, [ $_, \%opts ];
+ }
+
+ if( not @watch )
+ {
+ warn "invalid debian/watch" if 0;
+ return 'invalid';
+ }
+ debugmsg('Found valid debian/watch') if 0;
+ return ( 'valid', @watch );
+}
+
+my $header = <<_EOF;
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+ "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+ <title>pkg-perl package versions</title>
+ <style type="text/css">
+ body {
+ background: white;
+ color: black;
+ }
+ table {
+ border: 1px solid black;
+ border-collapse: collapse;
+ empty-cells: show;
+ }
+ td, th {
+ border: 1px solid black;
+ }
+ .upload {
+ background: lightsalmon;
+ }
+ .bugs {
+ background: lightseagreen;
+ }
+ .upgrade {
+ background: lightblue;
+ }
+ </style>
+</head>
+<body>
+<table>
+<tr>
+<td>
+<table>
+<tr><th>Legend</th></tr>
+<tr><td class="upload">Needs uploading</td></tr>
+<tr><td class="bugs">Needs bug fixing</td></tr>
+<tr><td class="upgrade">Needs upgrade from upstream</td></tr>
+</table>
+</td>
+<td>
+ <a href="http://pkg-perl.alioth.debian.org/">http://pkg-perl.alioth.debian.org</a>
+</td>
+</tr>
+</table>
+
+<br>
+
+<table>
+<tr>
+ <th>Package</th>
+ <th>Repository</th>
+ <th>Archive</th>
+ <th>Bugs</th>
+ <th>Upstream</th>
+</tr>
+_EOF
+
+my $total = 0;
+my $total_shown = 0;
+my $chunk;
+
+# loop over packages
+my @svn_packages = sort(keys(%{$svn->ls("$SVN_REPO/trunk", 'HEAD', 0)}));
+my $cur_ver;
+$svn->info("$SVN_REPO/trunk", undef, "HEAD", sub {
+ $cur_ver = $_[1]->rev();
+ }, 0);
+
+my %maindata;
+my(@wmodified, @cmodified);
+if(not $force_rescan and from_cache(\%maindata, "maindata", 168)) { # 1 week
+ if($maindata{packages}) {
+ debugmsg("Converting maindata hash\n");
+ my %md;
+ $md{"//lastrev"} = $maindata{lastrev};
+ $md{$_} = $maindata{packages}{$_} foreach(
+ keys %{$maindata{packages}});
+ %maindata = %md;
+ }
+ $svn->log( ["$SVN_REPO/trunk"], $maindata{"//lastrev"}, "HEAD", 1, 1, sub {
+ return if($_[1] <= $maindata{"//lastrev"});
+ debugmsg("Scanning changes from revision $_[1]:\n");
+ foreach(keys %{$_[0]}) {
+ debugmsg("- $_\n");
+ if(m{^/?trunk/([^/]+)/debian/(changelog|watch)$}) {
+ if($2 eq "changelog") {
+ push @cmodified, $1;
+ } else {
+ push @wmodified, $1;
+ }
+ }
+ }
+ }
+ );
+}
+$maindata{"//lastrev"} = $cur_ver;
+foreach(@pkg_rescan) { # forced rescan of packages
+ push @wmodified, $_;
+ push @cmodified, $_;
+}
+foreach(@svn_packages) {
+ next if($maindata{$_});
+ $maindata{$_} = {};
+ push @wmodified, $_;
+ push @cmodified, $_;
+}
+my %tmp = map({ $_ => 1 } @cmodified); # eliminate dupes
+foreach my $pkg (keys %tmp) {
+ $maindata{$pkg} ||= {};
+ foreach(keys %{$maindata{$pkg}}) {
+ delete $maindata{$pkg}{$_} if(/^chl_/);
+ }
+ my $data = read_changelog($pkg);
+ foreach(keys %$data) {
+ $maindata{$pkg}{$_} = $data->{$_};
+ }
+ delete $maindata{$pkg}{watch_unmangled_ver};
+ if($maindata{$pkg}{chl_ver} and $maindata{$pkg}{watch}) {
+ my $up_svn = $maindata{$pkg}{chl_ver};
+ $up_svn =~ s/^(?:\d+:)?(.+?)(?:-[^-]+)?$/$1/;
+ $up_svn = unmangle_debian_version($up_svn, $maindata{$pkg}{watch});
+ $maindata{$pkg}{watch_unmangled_ver} = $up_svn;
+ }
+}
+if($cpan_updated) {
+ push @wmodified, grep(
+ { $maindata{$_}{watch_cpan} }
+ @svn_packages );
+}
+%tmp = map({ $_ => 1 } @wmodified); # eliminate dupes
+foreach(keys %tmp) {
+ my $pkg = $maindata{$_};
+ my($st, @data) = read_watch($_);
+ debugmsg("$_: $st ".scalar(@data)) if 0;
+ foreach(keys %{$pkg}) {
+ delete $pkg->{$_} if(/^watch_/);
+ }
+ $pkg->{watch_url} = "";
+ $pkg->{watch_ver} = "";
+ $pkg->{watch_unmangled_ver} = $pkg->{chl_ver};
+ unless($st eq "valid") {
+ if($st eq "missing" and $pkg->{chl_native}) {
+ $pkg->{watch_ver} = $pkg->{chl_ver};
+ } elsif($st eq "invalid") {
+ $pkg->{watch_ver} = "Invalid debian/watch";
+ $pkg->{watch_url} = qq(http://svn.debian.org/wsvn/pkg-perl/trunk/$_/debian/watch?op=file&rev=0&sc=0);
+ } else { # missing
+ $pkg->{watch_ver} = "Missing debian/watch";
+ }
+ next;
+ }
+ $pkg->{watch} = \@data;
+
+ my($upstream_ver, $upstream_url) = latest_upstream_from_cpan(\@data,
+ \%cpan_authors, \%cpan_modules, \%cpan_dists);
+ if( $upstream_ver ) {
+ $pkg->{watch_cpan} = 1;
+ } else {
+ ($upstream_ver, $upstream_url) = latest_upstream_from_watch(\@data);
+ }
+ if( $upstream_ver ) {
+ $pkg->{watch_ver} = $upstream_ver;
+ $pkg->{watch_url} = $upstream_url || "";
+ } else {
+ $pkg->{watch_ver} = "Invalid debian/watch";
+ $pkg->{watch_url} = qq(http://svn.debian.org/wsvn/pkg-perl/trunk/$_/debian/watch?op=file&rev=0&sc=0);
+ }
+ if($pkg->{chl_ver}) {
+ my $up_svn = $pkg->{chl_ver};
+ $up_svn =~ s/^(?:\d+:)?(.+?)(?:-[^-]+)?$/$1/;
+ $up_svn = unmangle_debian_version($up_svn, \@data);
+ $pkg->{watch_unmangled_ver} = $up_svn;
+ }
+}
+to_cache(\%maindata, "maindata");
+
+my @pkgs_to_check;
+my $showalways;
+if( @ARGV )
+{
+ @pkgs_to_check = @ARGV;
+ $showalways = 1;
+}
+else
+{
+ debugmsg(
+ sprintf(
+ "%d entries in trunk\n",
+ scalar(@svn_packages)
+ ),
+ );
+ @pkgs_to_check = @svn_packages;
+}
+
+print $header;
+foreach ( @pkgs_to_check )
+{
+ $total++;
+
+ my $pkgd = $maindata{$_};
+ my $spkg = $maindata{$_}{chl_pkg} or die "No source package for $_?";
+ debugmsg("Examining $_ (src:$spkg)\n" );
+
+ debugmsg(sprintf(" - Archive has %s\n", $packages{$spkg} || 'none'));
+ debugmsg(sprintf(" - experimental has %s\n",
+ $experimental{$spkg} || 'none'));
+ debugmsg(sprintf(" - stable has %s\n", $stable{$spkg} || 'none'));
+ debugmsg(sprintf(" - oldstable has %s\n", $oldstable{$spkg} || 'none'));
+ debugmsg(sprintf(" - incoming has %s\n", $incoming{$spkg} || 'none' ));
+ debugmsg(sprintf(" - NEW has %s\n", $new{$spkg} || 'none'));
+ debugmsg(sprintf(" - Bug number: %d\n", $bugs{$spkg} || 'none'));
+ debugmsg(sprintf(" - %s has %s (%s)\n",
+ $pkgd->{watch_cpan} ? "CPAN" : "upstream",
+ $pkgd->{watch_ver} || 'none', $pkgd->{watch_url} || 'no url'));
+ debugmsg(sprintf(" - SVN has %s (upstream version=%s)\n",
+ $pkgd->{chl_ver} || 'none', $pkgd->{watch_unmangled_ver} || 'none'));
+
+ next unless($showalways or
+ $pkgd->{watch_unmangled_ver} ne $pkgd->{watch_ver}
+ or
+ ($bugs{$spkg} and $bugs{$spkg} > 0)
+ or
+ (! $packages{$spkg} or $pkgd->{chl_ver} ne $packages{$spkg})
+ and
+ (! $incoming{$spkg} or $pkgd->{chl_ver} ne $incoming{$spkg})
+ and
+ (! $new{$spkg} or $pkgd->{chl_ver} ne $new{$spkg})
+ );
+ $total_shown++;
+ my $text = "<tr>\n";
+ $text .= "<td>".(
+ ($packages{$spkg})
+ ? qq(<a href="http://packages.qa.debian.org/$spkg">$spkg</a>)
+ : qq($spkg)
+ )."</td>\n";
+
+ $text .= "<td".(
+ (! $packages{$spkg} or $pkgd->{chl_ver} ne $packages{$spkg})
+ ? ' class="upload">'
+ : '>');
+ $text .= qq(<a href="http://svn.debian.org/wsvn/pkg-perl/trunk/$_/debian/changelog?op=file&rev=0&sc=0" title=") . $pkgd->{chl_changer} . "\n" . $pkgd->{chl_date} . "\">" .$pkgd->{chl_ver} . "</a></td>\n";
+
+ my $archive_text = join(
+ "\n",
+ $packages{$spkg}||(),
+ (
+ ($incoming{$spkg})
+ ? "Incoming: $incoming{$spkg}"
+ : ()
+ ),
+ (
+ ($new{$spkg})
+ ? "NEW: $new{$spkg}"
+ : ()
+ ),
+ (
+ ($experimental{$spkg})
+ ? "experimental: $experimental{$spkg}"
+ : ()
+ ),
+ (
+ ($stable{$spkg} and not $packages{$spkg} and not $experimental{$spkg})
+ ? "stable: $stable{$spkg}"
+ : ()
+ ),
+ (
+ ($oldstable{$spkg} and not $stable{$spkg} and not $packages{$spkg} and not $experimental{$spkg})
+ ? "oldstable: $oldstable{$spkg}"
+ : ()
+ ),
+ );
+
+ $archive_text = qq(<a href="http://packages.qa.debian.org/$spkg">$archive_text</a>) if $packages{$spkg} or $experimental{$spkg} or $stable{$spkg} or $oldstable{$spkg};
+
+ $text .= "<td>$archive_text</td>\n";
+
+ my $bug_count = $bugs{$spkg} || 0;
+ my $bug_text = qq(<a href="http://bugs.debian.org/src:$spkg">$bug_count</a>);
+
+ $text .= (
+ $bug_count > 0
+ ? qq(<td class="bugs">$bug_text</td>\n)
+ : "<td></td>\n"
+ );
+
+ my $upstream_text = (
+ $pkgd->{watch_cpan} ? "CPAN: " : "") . $pkgd->{watch_ver};
+ $upstream_text = qq(<a href=") . $pkgd->{watch_url} . qq(">$upstream_text</a>) if $pkgd->{watch_url};
+
+ $text .= (
+ ($pkgd->{watch_unmangled_ver} ne $pkgd->{watch_ver})
+ ? qq(<td class="upgrade">$upstream_text</td>\n)
+ : "<td></td>\n"
+ );
+ $text .= "</tr>\n";
+ print $text;
+}
+
+my $date = gmtime;
+my $footer = <<_EOF;
+<tr><td colspan=\"4\"><b>TOTAL: $total_shown/$total</b></td></tr>
+</table>
+<hr>
+$date UTC<br>
+<i>$THIS_REVISION</i>
+</body>
+_EOF
+
+print $footer;
+
+unlink $lockfile or die $!;
+
+# vim: et:sts=4:ai:sw=4
Property changes on: trunk/community/qa/oldscripts/versioncheck3.pl
___________________________________________________________________
Name: svn:executable
+ *
Added: trunk/community/qa/packagecheck
===================================================================
--- trunk/community/qa/packagecheck (rev 0)
+++ trunk/community/qa/packagecheck 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,386 @@
+#!/bin/sh
+
+# Copyright 2007, 2008 gregor herrmann <gregor+debian at comodo.priv.at>
+# Copyright 2007, 2008 Damyan Ivanov <dmn at debian.org>
+# Copyright 2007 David Paleino <d.paleino at gmail.com>
+# Released under the terms of the GNU GPL version 2
+#
+# To be run from the directory above trunk/
+# (the trunk directory name can be given as the first argument)
+
+
+#############
+# functions #
+#############
+
+usage() {
+ [ -n "$1" ] && echo "ERROR: $1" && echo
+ echo "Usage:"
+ echo " $(basename $0) -{VHMWCR|A|h} {-c | [-p pkg] trunk}"
+ echo
+ echo " At least one parameter must be present."
+ echo
+ echo " Parameters:"
+ echo " -V - debian/control: add _V_cs-(Svn|Browser) fields;"
+ echo " remove XS-Vcs-(Svn|Browser) fields"
+ echo " -H - debian/control: add _H_omepage field; remove"
+ echo " pseudo-field Homepage"
+ echo " -M - debian/control: check _M_aintainer field for"
+ echo " Debian Perl Group <pkg-perl-maintainers at lists.alioth.debian.org>"
+ echo " -W - debian/_w_atch: change CPAN URLs to"
+ echo " http://search.cpan.org/dist/Mod-Ule/"
+ echo " -C - if -W is given, create debian/watch if it does not"
+ echo " exist"
+ echo " -R - debian/rules: _r_mdir /usr/\{lib,share\}/perl5"
+ echo " only if they exist"
+ echo " -A - all checks"
+ echo
+ echo " -p <pkg> - check only package <pkg>"
+ echo " -h - this help"
+ echo " -c - test only the package that is checked out in the"
+ echo " current working directory"
+ exit 1
+}
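+
+# Illustrative invocations (not part of the original script; the package name
+# below is hypothetical):
+#   ./packagecheck -A trunk                      # run all checks over every package in trunk/
+#   ./packagecheck -W -C -p libfoo-perl trunk    # fix or create debian/watch for one package
+#   ./packagecheck -A -c                         # run all checks on the package checked out in .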
+
+# given a source directory, try to find out the canonical CPAN distribution name
+detect_dist() {
+ DIR=$1
+ local PERLNAME
+ PERLNAME=''
+ if [ -s $DIR/Build.PL ]; then
+ PERLNAME=$(perl -n -e "print if s;^.*module_name.*=>.*['\"[]([a-zA-Z0-9:_-]+)[]'\"].*\$;\$1;" $DIR/Build.PL | sed -e 's/::/-/g' | head -n 1)
+ fi
+ if [ -s $DIR/Makefile.PL ]; then
+ PERLNAME=$(perl -n -e "print if s;^.*(?:DIST)?NAME.*=>.*['\"[]([a-zA-Z0-9:_-]+)[]'\"].*\$;\$1;" $DIR/Makefile.PL | sed -e 's/::/-/g' | head -n 1)
+ fi
+ if [ -s $DIR/META.yml ]; then
+ PERLNAME=$(perl -n -e "print if s;^name:.* ([a-zA-Z0-9:_-]+).*\$;\$1;" $DIR/META.yml | head -n 1)
+ fi
+ if [ -n "$PERLNAME" ]; then
+ if curl --silent http://search.cpan.org/dist/$PERLNAME/ | grep '<title>.*</title>' | grep --silent $PERLNAME; then
+ echo $PERLNAME
+ fi
+ fi
+}
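+
+# Example (illustrative, hypothetical module): a Makefile.PL containing
+#   NAME => 'Foo::Bar'
+# yields PERLNAME=Foo-Bar, which is then checked against
+# http://search.cpan.org/dist/Foo-Bar/ before being echoed.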
+
+testvcs() {
+ DIR=$1
+ PKG=$(basename $(realpath $DIR))
+ # check for and add missing Vcs-Svn field
+ if ! grep ^Vcs-Svn $DIR/debian/control > /dev/null; then
+ echo "$PKG: adding missing Vcs-Svn field"
+ perl -pi -e "s;(Standards-Version:.+);\$1\nVcs-Svn: svn://svn.debian.org/pkg-perl/trunk/$PKG/;" $DIR/debian/control
+ MSG_CONTROL_ADD="${MSG_CONTROL_ADD:+$MSG_CONTROL_ADD; }Vcs-Svn field (source stanza)"
+ CHANGED=1
+ fi
+
+ # check for and add missing Vcs-Browser field
+ if ! grep ^Vcs-Browser $DIR/debian/control > /dev/null; then
+ echo "$PKG: adding missing Vcs-Browser field"
+ perl -pi -e "s;(^Vcs-Svn:.+);\$1\nVcs-Browser: http://svn.debian.org/wsvn/pkg-perl/trunk/$PKG/;" $DIR/debian/control
+ MSG_CONTROL_ADD="${MSG_CONTROL_ADD:+$MSG_CONTROL_ADD; }Vcs-Browser field (source stanza)"
+ CHANGED=1
+ fi
+
+ # remove old XS-Vcs-(Svn|Browser) fields
+ if grep ^XS-Vcs- $DIR/debian/control > /dev/null; then
+ echo "$PKG: removing old XS-Vcs-* fields"
+ sed -i -e '/^XS-Vcs-/ d' $DIR/debian/control
+ MSG_CONTROL_RM="${MSG_CONTROL_RM:+$MSG_CONTROL_RM; }XS-Vcs-Svn fields (source stanza)"
+ CHANGED=1
+ fi
+}
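+
+# For a (hypothetical) package libfoo-perl, the substitutions above add:
+#   Vcs-Svn: svn://svn.debian.org/pkg-perl/trunk/libfoo-perl/
+#   Vcs-Browser: http://svn.debian.org/wsvn/pkg-perl/trunk/libfoo-perl/
+# Vcs-Svn is inserted right after the Standards-Version line of the source
+# stanza, and Vcs-Browser right after Vcs-Svn.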
+
+testhomepage() {
+ DIR=$1
+ PKG=$(basename $(realpath $DIR))
+ # check for and remove old Homepage from long description
+ OLDHP=$(egrep "^ Homepage: " $DIR/debian/control | egrep -o "http.+")
+ if [ -n "$OLDHP" ] ; then
+ echo "$PKG: removing Homepage: pseudo-field from Description"
+ perl -e "undef \$/; my \$buf=<STDIN>; \$buf =~ s/\n \.\n Homepage: .*//; print \$buf" < $DIR/debian/control > $DIR/debian/control.new
+ mv $DIR/debian/control.new $DIR/debian/control
+ MSG_CONTROL_RM="${MSG_CONTROL_RM:+$MSG_CONTROL_RM; }Homepage pseudo-field (Description)"
+ CHANGED=1
+ NEWHP=$OLDHP
+ fi
+
+ # check for and add missing new Homepage to source stanza
+ if ! egrep "^Homepage: " $DIR/debian/control > /dev/null; then
+ echo "$PKG: trying to add missing Homepage field to source stanza"
+
+ # only construct new URL if we don't have a "real one"
+ if [ -z "$NEWHP" ] || echo "$NEWHP" | grep cpan\.org > /dev/null; then
+ PERLNAME=$(detect_dist $DIR)
+ if [ -n "$PERLNAME" ]; then
+ NEWHP="http://search.cpan.org/dist/$PERLNAME/"
+ fi
+
+ # get NEWHP from somewhere else? debian/watch? debian/copyright?
+
+ fi
+
+ if [ -n "$NEWHP" ]; then
+ perl -pi -e "s;(Standards-Version:.+);\$1\nHomepage: $NEWHP;" $DIR/debian/control
+ MSG_CONTROL_ADD="${MSG_CONTROL_ADD:+$MSG_CONTROL_ADD; }Homepage field (source stanza)"
+ CHANGED=1
+ fi
+ fi
+
+}
+
+testmaintainer() {
+ DIR=$1
+ PKG=$(basename $(realpath $DIR))
+ # get Maintainer, check and change
+ OLDMAINT=$(grep ^Maintainer: $DIR/debian/control | cut -f2- -d" ")
+ if [ "$OLDMAINT" != "Debian Perl Group <pkg-perl-maintainers at lists.alioth.debian.org>" ] ; then
+ echo "$PKG: setting Maintainer to Debian Perl Group <pkg-perl-maintainers at lists.alioth.debian.org>"
+ perl -pi -e "s;^Maintainer:.+;Maintainer: Debian Perl Group <pkg-perl-maintainers\@lists.alioth.debian.org>;" $DIR/debian/control
+ MSG_CONTROL_CH="${MSG_CONTROL_CH:+$MSG_CONTROL_CH; }Maintainer set to Debian Perl Group <pkg-perl-maintainers at lists.alioth.debian.org> (was: $OLDMAINT)"
+ # keep old Maintainer in Uploaders unless it's the group in some other form
+ # TODO: remove DPG from Uploaders if we've added it to Maintainer
+ if ! echo $OLDMAINT | grep pkg-perl-maintainers ; then
+ if grep Uploaders $DIR/debian/control > /dev/null; then
+ perl -pi -e "BEGIN { our \$m=shift @ARVG }; s;(Uploaders:.+);\$1, \$m;" "$OLDMAINT" $DIR/debian/control
+ else
+ perl -pi -e "BEGIN { our \$m=shift @ARGV }; s;(Maintainer:.+);\$1\nUploaders: \$m;" "${OLDMAINT}" $DIR/debian/control
+ fi
+ MSG_CONTROL_CH="${MSG_CONTROL_CH:+$MSG_CONTROL_CH; }$OLDMAINT moved to Uploaders"
+ fi
+ CHANGED=1
+ fi
+}
+
+testwatchdist() {
+ DIR=$1
+ PKG=$(basename $(realpath $DIR))
+ # watchfile
+ if [ -e $DIR/debian/watch ] && ! grep search\.cpan\.org/dist/ $DIR/debian/watch >/dev/null; then
+ echo "$PKG: trying to change URL in debian/watch"
+ if perl -i -e "my \$changed=1; while(<>){ \$changed=0 if s{(?:^|\s+)(?:ht|f)tp://.*cpan.+/\s*(\S+)-(?:\S+)(\s.+)?$}{http://search.cpan.org/dist/\$1/ .*/\$1-v?(\\\\d[\\\\d_.]+)\\\\.(?:tar(?:\\\\.gz|\\\\.bz2)?|tgz|zip)\$2}i; print;} exit \$changed" $DIR/debian/watch ; then
+ perl -pi -e "s;^version=2;version=3;" $DIR/debian/watch
+ MSG_WATCH="debian/watch: use dist-based URL."
+ CHANGED=1
+ fi
+ elif [ ! -e $DIR/debian/watch ] && [ -n "$CREATE_WATCH" ]; then
+ echo "$PKG: creating debian/watch"
+ if dist_name=`detect_dist $DIR`; then
+ version_re='v?(\d[\d_.-]+)\.(?:tar(?:\.gz|\.bz2)?|tgz|zip)'
+ echo "version=3" > $DIR/debian/watch
+ echo "http://search.cpan.org/dist/$dist_name/ .+/$dist_name-$version_re\$" >> $DIR/debian/watch
+ svn add $DIR/debian/watch
+
+ MSG_WATCH="Add debian/watch."
+ CHANGED=1
+ else
+ echo "ERROR: unable to find distribution name"
+ fi
+ fi
+}
+
+testrmdir() {
+ DIR=$1
+ PKG=$(basename $(realpath $DIR))
+ # handle rmdir /usr/{share,lib}/perl5
+ if egrep -m 1 "(rmdir.*ignore-fail-on-non-empty|rm -r.*usr/(lib|share)(/perl5)?$)" $DIR/debian/rules | grep -v "\[ \! -d" > /dev/null ; then
+ ARCH=$(grep -m 1 -h "Architecture:" $DIR/debian/control | awk '{print $2;}')
+ case $ARCH in
+ any)
+ DELDIR="/share/perl5"
+ ;;
+ all)
+ DELDIR="/lib/perl5"
+ ;;
+ *)
+ ;;
+ esac
+ echo "$PKG: trying to make rmdir /usr${DELDIR} conditional"
+ if perl -i -e "my \$changed=1; while(<>){ \$changed=0 if s{rmdir.*ignore-fail-on-non-empty.*\s(\S+)$DELDIR}{[ ! -d \$1${DELDIR} ] || rmdir --ignore-fail-on-non-empty --parents --verbose \$1${DELDIR}}; print;} exit \$changed" $DIR/debian/rules ; then
+ MSG_RULES="debian/rules: delete /usr${DELDIR} only if it exists." && \
+ CHANGED=1
+ fi
+ if perl -i -e "my \$changed=1; while(<>){ \$changed=0 if s{-?rm -r.* (.*usr)/(?:lib|share)(?:/perl5)?\$}{[ ! -d \$1${DELDIR} ] || rmdir --ignore-fail-on-non-empty --parents --verbose \$1${DELDIR}}; print;} exit \$changed" $DIR/debian/rules ; then
+ MSG_RULES="debian/rules: delete /usr${DELDIR} only if it exists." && \
+ CHANGED=1
+ fi
+ if perl -i -e "my \$changed=1; while(<>){ \$changed=0 if s{-?find.+xargs.+rmdir.+}{[ ! -d \\\$(CURDIR)/debian/\\\$(shell dh_listpackages)/usr${DELDIR} ] || rmdir --ignore-fail-on-non-empty --parents --verbose \\\$(CURDIR)/debian/\\\$(shell dh_listpackages)/usr${DELDIR}}; print;} exit \$changed" $TRUNK/$1/debian/rules ; then
+ MSG_RULES="debian/rules: delete /usr${DELDIR} only if it exists." && \
+ CHANGED=1
+ fi
+ fi
+}
+
+
+########
+# main #
+########
+
+# parse options
+
+[ $# -ge 1 ] || usage "No parameter."
+
+ONLY_CURDIR=""
+
+while getopts p:cVHMWCRAh O; do
+ case "$O" in
+ p)
+ PKG=$OPTARG
+ ;;
+ c)
+ ONLY_CURDIR=1
+ ;;
+ V)
+ TESTVCS=1
+ ;;
+ M)
+ TESTMAINTAINER=1
+ ;;
+ H)
+ TESTHOMEPAGE=1
+ ;;
+ W)
+ TESTWATCHDIST=1
+ ;;
+ C)
+ CREATE_WATCH=1
+ ;;
+ R)
+ TESTRMDIR=1
+ ;;
+ A)
+ TESTVCS=1
+ TESTHOMEPAGE=1
+ TESTMAINTAINER=1
+ TESTWATCHDIST=1
+ TESTRMDIR=1
+ ;;
+ h)
+ usage
+ ;;
+ *)
+ usage "Unknown parameter."
+ ;;
+ esac
+done
+shift $(($OPTIND - 1)) # bash: shift $((OPTIND - 1))
+
+
+check_package()
+{
+ # reset variables
+ p=$1
+ OLDHP=
+ PERLNAME=
+ NEWHP=
+ OLDMAINT=
+ MSG_CONTROL=
+ MSG_CONTROL_ADD=
+ MSG_CONTROL_RM=
+ MSG_CONTROL_CH=
+ MSG_WATCH=
+ MSG_RULES=
+
+ # TESTVCS - -V debian/control: add _V_cs-(Svn|Browser) fields; remove XS-Vcs-(Svn|Browser) field
+ [ "$TESTVCS" = 1 ] && testvcs $p
+
+ # TESTHOMEPAGE - -H debian/control: add _H_omepage field; remove pseudo-field Homepage
+ [ "$TESTHOMEPAGE" = 1 ] && testhomepage $p
+
+ # TESTMAINTAINER - -M debian/control: check _M_aintainer field for "Debian Perl Group <pkg-perl-maintainers@lists.alioth.debian.org>"
+ [ "$TESTMAINTAINER" = 1 ] && testmaintainer $p
+
+ # TESTWATCHDIST - -W debian/_w_atch: change CPAN URLs to http://search.cpan.org/dist/Mod-Ule/
+ [ "$TESTWATCHDIST" = 1 ] && testwatchdist $p
+
+ # TESTRMDIR - -R debian/rules: _r_mdir /usr/\{lib,share\}/perl5 only if they exist
+ [ "$TESTRMDIR" = 1 ] && testrmdir $p
+
+ # changelog
+ if [ -n "$MSG_CONTROL_ADD" -o -n "$MSG_CONTROL_RM" -o -n "$MSG_CONTROL_CH" ] ; then
+ MSG_CONTROL="debian/control:"
+ [ -n "$MSG_CONTROL_ADD" ] && MSG_CONTROL="$MSG_CONTROL Added: $MSG_CONTROL_ADD."
+ [ -n "$MSG_CONTROL_RM" ] && MSG_CONTROL="$MSG_CONTROL Removed: $MSG_CONTROL_RM."
+ [ -n "$MSG_CONTROL_CH" ] && MSG_CONTROL="$MSG_CONTROL Changed: $MSG_CONTROL_CH."
+ dch --mainttrailer --release-heuristic=changelog --changelog $p/debian/changelog "$MSG_CONTROL"
+ fi
+ if [ -n "$MSG_WATCH" ] ; then
+ dch --mainttrailer --release-heuristic=changelog --changelog $p/debian/changelog "$MSG_WATCH"
+ fi
+ if [ -n "$MSG_RULES" ] ; then
+ dch --mainttrailer --release-heuristic=changelog --changelog $p/debian/changelog "$MSG_RULES"
+ fi
+}
+
+# start the game
+
+CHANGED=0
+TRUNK=${1:-trunk}
+
+if [ -n "$ONLY_CURDIR" ]; then
+ WORK_DIR="."
+elif [ -n "$PKG" ]; then
+ WORK_DIR=$TRUNK/$PKG
+else
+ WORK_DIR=$TRUNK
+fi
+
+echo "Running svn up $WORK_DIR ..."
+svn up $WORK_DIR
+
+echo "Checking if $WORK_DIR is clean ..."
+UNCLEAN=$(svn st $WORK_DIR |egrep -v '^\?')
+if [ -n "$UNCLEAN" ]; then
+ echo "$UNCLEAN"
+ echo WARNING: $WORK_DIR is not clean
+fi
+
+if [ -n "$ONLY_CURDIR" ]; then
+ check_package .
+elif [ -n "$PKG" ]; then
+ check_package $TRUNK/$PKG
+else
+ # loop over packages
+
+ echo "Grepping through packages ..."
+ for PKG in $(svn ls $TRUNK); do
+
+ PKG=${PKG%/}
+ check_package $TRUNK/$PKG
+
+ done
+fi
+
+# work is done. svn diff? svn commit?
+
+if [ "$CHANGED" = "1" ]; then
+
+ read -p "Show svn diff $WORK_DIR (y|N)? " DIFF
+ case $DIFF in
+ y|Y)
+ svn diff $WORK_DIR | less
+ ;;
+ *)
+ ;;
+ esac
+
+ if [ -n "$UNCLEAN" ]; then
+ echo $WORK_DIR was not clean at start. Please commit manually.
+ else
+ read -p "Commit $WORK_DIR (y|N)? " COMMIT
+ case $COMMIT in
+ y|Y)
+ svn ci -m "[packagecheck] fixed Vcs-(Svn|Browser)/Homepage field(s) in debian/control and/or URL in debian/watch and/or rmdir /usr/{lib|share}/perl5 in debian/rules." $WORK_DIR
+ ;;
+ *)
+ ;;
+ esac
+ fi
+
+else
+ echo "Nothing changed."
+fi
+
+exit 0
+
+# vi: set noet sts=0 sw=8:
Property changes on: trunk/community/qa/packagecheck
___________________________________________________________________
Name: svn:executable
+ *
Added: trunk/community/qa/qareport
===================================================================
--- trunk/community/qa/qareport (rev 0)
+++ trunk/community/qa/qareport 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,129 @@
+#!/usr/bin/perl
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: qareport 11877 2007-12-31 06:48:26Z tincho-guest $
+#
+# Draft of a report
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+use strict;
+use warnings;
+
+#use DebianQA::Cache;
+use DebianQA::Classification;
+#use DebianQA::Common;
+use DebianQA::Config;
+#use DebianQA::DebVersions;
+use DebianQA::Svn;
+use Getopt::Long;
+
+my $p = new Getopt::Long::Parser;
+$p->configure(qw(no_ignore_case bundling pass_through));
+
+my $list_is_dirs = 0;
+my $show_all = 0;
+$p->getoptions('help|h|?' => \&help, 'directories!' => \$list_is_dirs,
+ 'showall|a!' => \$show_all
+ ) or die "Error parsing command-line arguments!\n";
+
+my $opts = getopt_common(0, 1); # No passthru, load config
+
+my @dirs = @ARGV;
+
+if($list_is_dirs) {
+ foreach my $dir (@dirs) {
+ $dir = svndir2pkgname($dir) || $dir; # Fallback
+ }
+}
+
+my @pkglist = @dirs;
+@pkglist = get_pkglist() unless(@pkglist);
+my $csfy = classify(@pkglist);
+unless($show_all) {
+ foreach(keys %$csfy) {
+ delete $csfy->{$_} unless(%{$csfy->{$_}{hilight}});
+ }
+}
+print("Showing ", scalar keys %$csfy, " out of ", scalar @pkglist,
+ " packages\n");
+foreach my $pkg (sort keys %$csfy) {
+ my %data = %{$csfy->{$pkg}};
+ print "$pkg:";
+ if($pkg ne $data{svn_path}) {
+ print " (SVN: $data{svn_path})";
+ }
+ print " ", $data{svn}{short_descr} if($data{svn}{short_descr});
+ print "\n";
+ if(%{$data{status}}) {
+ print " - Problems: ", join(", ", keys %{$data{status}}), "\n";
+ }
+ if(@{$data{notes}}) {
+ print " - Notes: ", join(", ", @{$data{notes}}), "\n";
+ }
+ print " - Repository status: ";
+ if($data{hilight}{svn}) {
+ print join(", ", keys %{$data{hilight}{svn}}), "\n";
+ } else {
+ print "OK\n";
+ }
+ if($data{svn}{version}) {
+ print " + Latest released: $data{svn}{version} ";
+ print "($data{svn}{changer})\n";
+ }
+ if($data{svn}{un_version}) {
+ print " + Latest unreleased: $data{svn}{un_version}\n";
+ }
+ #
+ print " - Debian archive status: ";
+ if($data{hilight}{archive}) {
+ print join(", ", keys %{$data{hilight}{archive}}), "\n";
+ } else {
+ print "OK\n";
+ }
+ if($data{archive}{most_recent}) {
+ print " + Latest version: $data{archive}{most_recent} ";
+ print "(from $data{archive}{most_recent_src})\n";
+ }
+ #
+ print " - BTS status: ";
+ if($data{hilight}{bts}) {
+ print join(", ", keys %{$data{hilight}{bts}}), "\n";
+ } else {
+ print "OK\n";
+ }
+ foreach(keys %{$data{bts}}) {
+ print " + Bug #$_ - $data{bts}{$_}{subject}\n";
+ }
+ #
+ print " - Upstream status: ";
+ if($data{hilight}{upstream}) {
+ print join(", ", keys %{$data{hilight}{upstream}}), "\n";
+ } else {
+ print "OK\n";
+ }
+ print " + URL: $data{upstream_url}\n" if($data{upstream_url});
+ if($data{watch}{upstream_version}) {
+ print " + Latest version: $data{watch}{upstream_version}\n";
+ }
+}
+#use Data::Dumper; print Dumper $data;
+
+sub help {
+ print <<END;
+Usage:
+ $0 [options] [dirname [dirname ...]]
+
+ For each named package (or repository directory, with --directories), prints
+ a status report based on the information gathered from the Debian archive,
+ BTS, watch files and the Subversion repository. With no arguments, all known
+ packages are covered.
+
+Options:
+ --help, -h This help.
+ --conf, -c FILE Specifies a configuration file, uses defaults if not
+ present.
+ --directories Treat the parameters as repository directory names, instead
+ of source package names.
+ --showall, -a Show status of all packages, including OK packages.
+
+END
+ exit 0;
+}
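+
+# Illustrative usage (package and directory names are hypothetical):
+#   ./qareport                              # report only packages with problems
+#   ./qareport --showall libfoo-perl        # full status of a single package
+#   ./qareport --directories some-svn-dir   # treat arguments as repository directory names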
Property changes on: trunk/community/qa/qareport
___________________________________________________________________
Name: svn:executable
+ *
Added: trunk/community/qa/qareport-chlog.cgi
===================================================================
--- trunk/community/qa/qareport-chlog.cgi (rev 0)
+++ trunk/community/qa/qareport-chlog.cgi 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,62 @@
+#!/usr/bin/perl
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: qareport-chlog.cgi 11907 2008-01-02 12:19:39Z dmn $
+#
+# Report packages version states
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Released under the terms of the GNU GPL 2
+use strict;
+use warnings;
+
+use DebianQA::Config qw(read_config %CFG);
+use DebianQA::Svn;
+use CGI::Carp qw(fatalsToBrowser);
+use CGI;
+
+read_config();
+
+my $cgi = new CGI;
+
+
+if( $ENV{GATEWAY_INTERFACE} )
+{
+ print $cgi->header(
+ -content_type => 'text/html; charset=utf-8',
+ );
+}
+
+my $pkg = $cgi->param('pkg') or exit 0;
+my $rel = $cgi->param('rel') || '';
+
+my $svn = svn_get();
+
+my $text = $svn->{$pkg}{ ($rel eq 'rel')?'text' : 'un_text' };
+
+$text =~ s/&/&/g;
+$text =~ s/'/"/g;
+$text =~ s/</</g;
+$text =~ s/>/>/g;
+$text =~ s{\r?\n}{<br/>}g;
+
+# replace bug-numbers with links
+$text =~ s{
+ ( # leading
+ ^ # start of string
+ |\W # or non-word
+ )
+ \#(\d+) # followed by a bug ID
+ \b # word boundary
+}
+{$1<a href="http://bugs.debian.org/$2">#$2</a>}xgm;
+# treat text as multi-line
+# Same for CPAN's RT
+$text =~ s{\bCPAN#(\d+)\b}
+{<a href="http://rt.cpan.org/Ticket/Display.html?id=$1">CPAN#$1</a>}gm;
+
+print qq(<a style="float: right; margin: 0 0 1pt 1pt; clear: none;" href="javascript:more_chlog('$pkg', '$rel')">reload</a>\n);
+print qq(<code style="white-space: pre">$text</code>);
+
+exit 0;
+
Property changes on: trunk/community/qa/qareport-chlog.cgi
___________________________________________________________________
Name: svn:executable
+ *
Added: trunk/community/qa/qareport.cgi
===================================================================
--- trunk/community/qa/qareport.cgi (rev 0)
+++ trunk/community/qa/qareport.cgi 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,155 @@
+#!/usr/bin/perl
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: qareport.cgi 13063 2008-01-21 03:01:53Z tincho-guest $
+#
+# Report packages version states
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Released under the terms of the GNU GPL 2
+use strict;
+use warnings;
+
+use DebianQA::Cache;
+use DebianQA::Classification;
+use DebianQA::Config qw(read_config %CFG);
+use DebianQA::Svn;
+use CGI ();
+use CGI::Carp qw(fatalsToBrowser);
+use POSIX qw(locale_h);
+use Template ();
+use Date::Parse ();
+
+read_config();
+
+my $cgi = new CGI;
+
+my $cache = read_cache(consolidated => "");
+my $script_date = '$Date: 2008-01-21 04:01:53 +0100 (lun, 21 gen 2008) $';
+$script_date = join( ' ', (split(/ /, $script_date))[1..3] );
+my @modified = sort(
+ map(
+ {
+ # Each key of the consolidated cache works like a root cache
+ find_stamp($cache->{$_}, "")
+ } qw(svn watch archive bts pkglist),
+ ),
+ Date::Parse::str2time($script_date),
+);
+my $last_modified = $modified[-1];
+my $ims;
+my @pkglist = get_pkglist();
+my $cls = classify(@pkglist);
+
+my( @no_prob, @for_upload, @for_upgrade, @weird, @waiting, @wip, @with_bugs,
+ @all );
+
+unless($cgi->param("show_all"))
+{
+ foreach(keys %$cls)
+ {
+ delete $cls->{$_} unless(%{$cls->{$_}{hilight}});
+ }
+}
+
+foreach my $pkg (sort keys %$cls)
+{
+ my $data = $cls->{$pkg};
+
+ my $dest; # like "destiny" :)
+ my %info = (
+ name => $pkg,
+ map(
+ ($_=>$data->{$_}),
+ qw( watch archive svn bts notes hilight ),
+ ),
+ );
+ my $status = $data->{status}; # to save some typing
+
+ $dest ||= \@for_upgrade if $status->{needs_upgrade};
+ $dest ||= \@wip if $status->{not_finished} or $status->{invalid_svn_version};
+ $dest ||= \@for_upload if $status->{needs_upload} or $status->{never_uploaded};
+ $dest ||= \@weird if $status->{repo_ancient} or $status->{svn_ancient}
+ or $status->{upstream_ancient};
+ $dest ||= \@wip if $status->{watch_error};
+ $dest ||= \@waiting if $status->{archive_waiting};
+ $dest ||= \@with_bugs if $status->{has_bugs};
+ $dest ||= \@no_prob;
+
+ push @$dest, \%info;
+ push @all, \%info;
+}
+
+if( $ENV{GATEWAY_INTERFACE} )
+{
+ my $htmlp = $cgi->Accept("text/html");
+ my $xhtmlp = $cgi->Accept("application/xhtml+xml");
+
+ $ims = $cgi->http('If-Modified-Since');
+ $ims = Date::Parse::str2time($ims) if $ims;
+
+ if( $ims and $ims >= $last_modified )
+ {
+ print $cgi->header('text/html', '304 Not modified');
+ exit 0;
+ }
+
+ my $old_locale = setlocale(LC_TIME);
+ setlocale(LC_TIME, "C");
+ print $cgi->header(
+ -content_type => (
+ ($xhtmlp and $xhtmlp > $htmlp)
+ ? 'application/xhtml+xml; charset=utf-8'
+ : 'text/html; charset=utf-8'
+ ),
+ -last_modified => POSIX::strftime(
+ "%a, %d %b %Y %T %Z",
+ gmtime($last_modified),
+ ),
+ $cgi->param("refresh") ? (-refresh => $cgi->param("refresh")) : (),
+ );
+ setlocale(LC_TIME, $old_locale);
+}
+
+my $template = $cgi->param("template") || $CFG{qareport_cgi}{default_template};
+my $tt = new Template(
+ {
+ INCLUDE_PATH => $CFG{qareport_cgi}{templates_path},
+ INTERPOLATE => 1,
+ POST_CHOMP => 1,
+ FILTERS => {
+ 'quotemeta' => sub { quotemeta(shift) },
+ },
+ }
+);
+
+$tt->process(
+ $template,
+ {
+ data => $cls,
+ group_name => $CFG{qareport_cgi}{group_name},
+ group_url => $CFG{qareport_cgi}{group_url},
+ wsvn_url => $CFG{qareport_cgi}{wsvn_url},
+ (
+ ( ($cgi->param('format')||'') eq 'list' )
+ ? (
+ all => \@all
+ )
+ : (
+ all => \@no_prob,
+ for_upgrade => \@for_upgrade,
+ weird => \@weird,
+ for_upload => \@for_upload,
+ waiting => \@waiting,
+ wip => \@wip,
+ with_bugs => \@with_bugs,
+ )
+ ),
+ shown_packages => scalar(@all),
+ total_packages => scalar(@pkglist),
+# params => scalar($cgi->Vars()),
+ },
+) || die $tt->error;
+
+exit 0;
+
Property changes on: trunk/community/qa/qareport.cgi
___________________________________________________________________
Name: svn:executable
+ *
Added: trunk/community/qa/svncruftcheck
===================================================================
--- trunk/community/qa/svncruftcheck (rev 0)
+++ trunk/community/qa/svncruftcheck 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,24 @@
+#!/bin/sh
+
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2007, 2008
+# Released under the terms of the GNU GPL 2
+
+. commoncheck
+
+echo "CHECK: $REPO/branches/upstream"
+for p in $(svn ls $REPO/branches/upstream); do
+ p=${p%/}
+ echo " $p"
+ if ! svn ls $REPO/trunk/$p >/dev/null 2>&1 ; then
+ echo "$p is in $REPO/branches/upstream but not in $REPO/trunk"
+ fi
+done
+
+echo "CHECK: $REPO/tags"
+for p in $(svn ls $REPO/tags); do
+ p=${p%/}
+ echo " $p"
+ if ! svn ls $REPO/trunk/$p >/dev/null 2>&1 ; then
+ echo "$p is in $REPO/tags but not in $REPO/trunk"
+ fi
+done
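+
+# Sample output when a package exists only under branches/upstream
+# ($REPO is expected from the sourced commoncheck; the package name is hypothetical):
+#   CHECK: $REPO/branches/upstream
+#     libfoo-perl
+#   libfoo-perl is in $REPO/branches/upstream but not in $REPO/trunk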
Property changes on: trunk/community/qa/svncruftcheck
___________________________________________________________________
Name: svn:executable
+ *
Added: trunk/community/qa/templates/by_category
===================================================================
--- trunk/community/qa/templates/by_category (rev 0)
+++ trunk/community/qa/templates/by_category 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,375 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- vim:ts=4:sw=4:et:ai:sts=4:syntax=xhtml
+-->
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+[% USE CGI %]
+[% SET hide_binaries = CGI.param("hide_binaries") %]
+[% SET start_collapsed = CGI.param("start_collapsed") %]
+[% BLOCK bts_link %]
+[% IF pkg.bts.size %]
+<div class="bts-info"><div class=" popup"><a href="http://bugs.debian.org/$pkg.name"
+>$pkg.bts.keys.size</a>
+<!-- span class="paren">[</span -->
+<table class="bts-info-details balloon">
+[% FOREACH bug IN pkg.bts.nsort %]
+<tr>
+ <td>
+ <a class="bts-${pkg.bts.$bug.severity}"
+ href="http://bugs.debian.org/$bug">#$bug</a>
+ [% IF pkg.bts.$bug.forwarded %]
+ [% SET F = pkg.bts.$bug.forwarded %]
+ [% qm = BLOCK %]^https?:[% FILTER quotemeta %]//rt.cpan.org/[% END %].+html\?id=(\d+)[% '$' %][% END %]
+ [% rt = F.match(qm) %]
+ <div style="font-size: smaller">
+ [% IF rt.0 %]
+ <a href="$F">cpan#[% rt.0 %]</a>
+ [% ELSE %]
+ <a href="[% IF F.match("^http") %][% GET F | html %][% ELSE %]mailto:[% GET F | uri %][% END %]">forwarded</a>
+ [% END %]
+ </div>
+ [% END %]
+ [% IF pkg.bts.$bug.keywordsA.size > 0 %]
+ <div style="font-size: smaller">
+ [% pkg.bts.$bug.keywordsA.join(", ") | html %]
+ </div>
+ [% END %]
+ </td>
+ <td>
+ [% qm = BLOCK %][% pkg | quotemeta %][% END %]
+ [% pkg.bts.$bug.subject.replace("^$qm:\\s*",'') | html %]</td>
+</tr>
+[% END #FOREACH %]
+</table>
+<!-- span class="paren">]</span -->
+</div></div>
+[% END #IF bugs %]
+[% END #BLOCK bts_link %]
+
+[% BLOCK package %]
+ [% SET arch_ver = pkg.archive.most_recent %]
+ [% SET arch_src = pkg.archive.most_recent_src %]
+ [% SET svn_ver = pkg.svn.version %]
+ [% SET svn_un_ver = pkg.svn.un_version %]
+ <tr>
+ <td>[% IF pkg.notes.size %]<span class="popup">$pkg.name<span
+ class="balloon" style="margin-left:0">[%
+ pkg.notes.join(', ')
+ %]</span></span>[% ELSE %]$pkg.name[% END %]
+ [% IF pkg.svn.section AND pkg.svn.section != "main" %]
+ <span class="section-$pkg.svn.section">[$pkg.svn.section]</span>
+ [% END #IF %]
+ [% IF ! hide_binaries
+ AND pkg.svn.binaries AND pkg.svn.binaries.size
+ AND (
+ pkg.svn.binaries.size > 1
+ OR pkg.svn.binaries.first != pkg.name
+ ) %]<br/><span style="font-size: smaller">([%
+ pkg.svn.binaries.join(', ') %])</span>[% END %]
+ </td>
+
+ <td[% IF pkg.hilight.svn %] class="todo"[% END %]>
+ [% chlog_url = BLOCK %][% pkg.name | format("$wsvn_url")
+ %]/debian/changelog?op=file&amp;rev=0&amp;sc=0[% END %]
+ <span class="popup"><a href="$chlog_url">$svn_ver</a><span
+ id="${pkg.name}_rel_chlog_baloon" class="balloon"><a
+ href="javascript:more_chlog('$pkg.name','rel')">[%
+ pkg.svn.changer | html %] — [% pkg.svn.date |
+ html %]</a>
+ </span></span>
+
+ [% IF svn_un_ver AND (svn_un_ver != svn_ver) %]
+ <span class="popup" style="font-size: smaller"><a
+ href="$chlog_url">($svn_un_ver)</a><span
+ id="${pkg.name}_unrel_chlog_baloon" class="balloon"><a
+ href="javascript:more_chlog('$pkg.name','unrel')">[%
+ pkg.svn.un_changer | html %] — [% pkg.svn.un_date
+ | html %]</a></span></span>[% END #IF %]
+ </td>
+
+ <td[% IF pkg.hilight.archive %] class="todo"[% END %]>
+ [% IF arch_ver %]
+ [% IF arch_src != "new" OR pkg.archive.unstable %]
+ <a href="http://packages.qa.debian.org/$pkg.name">$arch_ver</a>
+ [% ELSE %]
+ <a href="http://ftp-master.debian.org/new/${pkg.name}_${arch_ver}.html">$arch_ver</a>
+ [% END #IF %]
+ [% END #IF %]
+ [% IF arch_src AND arch_src != "unstable" %]
+ ($arch_src)
+ [% END #IF %]
+ </td>
+
+ <td>[% INCLUDE bts_link pkg=pkg %]</td>
+
+ <td[% IF pkg.hilight.upstream %] class="todo"[% END %]><a href="[% IF
+ pkg.watch.upstream_mangled %][% pkg.watch.upstream_url | html
+ %][% ELSE %][% "${pkg.name}" | format("$wsvn_url")
+ %]/debian/watch?op=file&amp;rev=0&amp;sc=0[% END %]">[%
+ pkg.watch.upstream_mangled || pkg.watch.error
+ %]</a>
+ [% IF pkg.hilight.upstream AND pkg.watch.error %]<a
+ href="[% "${pkg.name}" | format("$wsvn_url")
+ %]/debian/copyright?op=file&amp;rev=0&amp;sc=0"
+ >(copyright info)</a>
+ [% END %]
+ </td>
+ </tr>
+[% END #BLOCK package %]
+
+[% BLOCK section %]
+ [% IF list.0 %]
+ [% IF title and name %]
+ <thead>
+ <tr>
+ <th colspan="5" class="clickable"><a style="display: block" href="javascript:toggle_visibility('$name')">$title ($list.size)</a></th>
+ </tr>
+ </thead>
+ [% END #IF title and name %]
+ <tbody[% IF name %] id="$name" style="display: [% IF start_collapsed %]none[% ELSE %]table-row-group[% END %]"[% END %]>
+ <tr>
+ <th>Package</th>
+ <th>Repository</th>
+ <th>Archive</th>
+ <th>Bugs</th>
+ <th>Upstream</th>
+ </tr>
+ [% FOREACH pkg IN list %]
+ [% INCLUDE package pkg=pkg %]
+ [% END #FOREACH list %]
+ </tbody>
+ [% END #IF list.size %]
+[% END #BLOCK section %]
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
+<head>
+ <title>${group_name} packages overview</title>
+ <style type="text/css">
+ body {
+ background: white;
+ color: black;
+ margin: 0;
+ padding: 8px;
+ }
+ table {
+ border: 1px solid black;
+ border-collapse: collapse;
+ empty-cells: show;
+ }
+ td, th {
+ border: 1px solid black;
+ padding: 0.2em;
+ }
+ th.clickable, th.clickable a, th.clickable a:visited {
+ background: #404040;
+ color: white;
+ }
+ a {
+ text-decoration: none;
+ }
+/* before enabling this, think about the link colors -- they all have to
+ be visible with the new background
+ tr:hover td, tr:hover th {
+ background: #F5F5B5;
+ color: black;
+ }
+*/
+ #main_table {
+ width: 95%;
+ }
+ .todo {
+ background: #ADDBE6; /* lightblue */
+ }
+ .section-non-free { color: red; }
+ .section-contrib { color: maroon; }
+ .bts-wishlist {
+ color: green;
+ }
+ .bts-minor {
+ color: #004000; /* darkgreen */
+ }
+ .bts-normal, .bts-important {
+ }
+ .bts-grave, .bts-serious {
+ color: red;
+ }
+ .bts-critical {
+ color: red;
+ text-decoration: blink;
+ }
+ /* From ikiwiki templates */
+ .popup {
+ border-bottom: 1px dotted;
+ }
+ .popup .balloon,
+ .popup .paren,
+ .popup .expand {
+ display: none;
+ }
+ .popup:hover .balloon,
+ .popup:focus .balloon {
+ position: absolute;
+ display: block;
+ min-width: 15em;
+ max-width: 40em;
+ max-height: 75%;
+ overflow: auto;
+ margin: 0em 0 0 -15em;
+ padding: 0.5em;
+ border: 2px outset #F5F5B5; /* light yellowish */
+ background: #F5F5B5; /* light yellowish */
+ color: black;
+ /* Nonstandard, but very nice. */
+ opacity: 0.95;
+ -moz-opacity: 0.95;
+ filter: alpha(opacity=95);
+ }
+ table.bts-info-details td:first-child {
+ text-align: center;
+ }
+ div.bts-info div.popup {
+ text-align: center;
+ }
+ .bts-info-details p {
+ text-indent: -3em;
+ margin: 0 0 0 3em;
+ }
+ table.bts-info-details td {
+ border: 0;
+ vertical-align: top;
+ text-align: left;
+ }
+ #options {
+ float: left;
+ padding: .5em;
+ border: 1px black dashed;
+ margin-bottom: 1em;
+ }
+ #options h2 { font-size: 110%; margin: 0; }
+ #options div { display: none }
+ #w3org { clear: both; }
+ </style>
+ <script type="text/javascript">
+ //<![CDATA[
+ <!--
+ function toggle_visibility(id)
+ {
+ var el = document.getElementById(id);
+ el.style.display = (el.style.display == 'none' ? 'table-row-group' : 'none');
+ }
+ function more_chlog(pkg,rel)
+ {
+ var xml;
+ if (window.XMLHttpRequest) {
+ xml = new XMLHttpRequest();
+ } else if (window.ActiveXObject) {
+ xml = new ActiveXObject("Microsoft.XMLHTTP");
+ } else {
+ alert("Your browser lacks the needed ability to use Ajax. Sorry.");
+ return false;
+ }
+
+ xml.open('GET', 'qareport-chlog.cgi?pkg='+pkg+';rel='+rel);
+
+ xml.onreadystatechange = function() {
+ ajaxStateChanged(xml, pkg, rel);
+ };
+
+ xml.send('');
+ }
+ function ajaxStateChanged(xml, pkg, rel)
+ {
+ var el = document.getElementById(pkg+'_'+rel+'_chlog_baloon');
+ if( !el )
+ {
+ alert('Element "'+pkg+'_'+rel+'_chlog_baloon'+'" not found');
+ return false;
+ }
+ if( xml.readyState <= 1 )
+ {
+ el.innerHTML = el.innerHTML + "<br/>Loading...";
+ }
+ if( xml.readyState == 3 )
+ {
+ el.innerHTML = el.innerHTML + ".";
+ }
+ if( xml.readyState == 4 )
+ {
+ if( xml.status == 200 )
+ {
+ el.innerHTML = xml.responseText;
+ }
+ else
+ {
+ el.innerHTML = xml.status+': '+xml.statusText;
+ }
+ }
+ }
+ //-->
+ //]]>
+ </script>
+</head>
+<body>
+<h1><a href="${group_url}">${group_name}</a></h1>
+<table id="main_table">
+ [% INCLUDE section data=data list=for_upgrade name="for_upgrade" title="Newer upstream available" %]
+ [% INCLUDE section data=data list=for_upload name="for_upload" title="Ready for upload" %]
+ [% INCLUDE section data=data list=waiting name="waiting" title="NEW and incoming" %]
+ [% INCLUDE section data=data list=weird name="weird" title="Packages with strange versions in the repository" %]
+ [% INCLUDE section data=data list=wip name="wip" title="Work in progress" %]
+ [% INCLUDE section data=data list=with_bugs name="with_bugs" title="With bugs" %]
+ [% INCLUDE section data=data list=all name='unclassified' title='Unclassified' %]
+</table>
+
+<h2>$shown_packages/$total_packages</h2>
+
+<div id="options">
+<h2>Options</h2>
+[% CGI.start_form({ Method => 'GET' }) %]
+<p>
+[% CGI.checkbox({ Name => 'show_all', Label => "Show all packages" }) %]
+
+[% CGI.checkbox({ Name => 'start_collapsed', Label => "Collapse tables" }) %]
+
+[% CGI.checkbox({ Name => 'hide_binaries',
+ Label => "Don't show binary package names" }) %]
+</p>
+<p>Order: [% CGI.radio_group({
+ Name => 'format',
+ Values => [ 'list', 'categories' ],
+ Default => 'categories',
+ Labels => {
+ categories => "by category",
+ list => "by name",
+ }
+}).join("\n") %]</p>
+<p>Refresh: [% CGI.radio_group({
+ Name => 'refresh',
+ Values => [ 0, 1800, 3600, 7200 ],
+ Default => 0,
+ Labels => {
+ "0" => "No refresh",
+ "1800" => "30 min",
+ "3600" => "1 hour",
+ "7200" => "2 hours"
+ }
+}).join("\n") %]</p>
+<p>[% CGI.submit({ Label => 'Reload' }) %]</p>
+[% CGI.end_form.join("\n") %]
+</div>
+<p id="w3org">
+ <a href="http://validator.w3.org/check?uri=referer"><img
+ style="border:0;width:88px;height:31px"
+ src="http://www.w3.org/Icons/valid-xhtml10-blue"
+ alt="Valid XHTML 1.0 Strict"/></a>
+ <a href="http://jigsaw.w3.org/css-validator/check/referer">
+ <img style="border:0;width:88px;height:31px"
+ src="http://jigsaw.w3.org/css-validator/images/vcss"
+ alt="Valid CSS!" /></a>
+</p>
+[% META id='$Id: by_category 13824 2008-01-29 07:54:35Z dmn $' %]
+<p style="border-top: 1px solid black">
+ <code>$template.id</code>
+</p>
+</body>
+</html>
Added: trunk/community/qa/wnppcheck
===================================================================
--- trunk/community/qa/wnppcheck (rev 0)
+++ trunk/community/qa/wnppcheck 2008-01-31 18:15:21 UTC (rev 1237)
@@ -0,0 +1,19 @@
+#!/bin/sh
+
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2007
+# Released under the terms of the GNU GPL 2
+
+URL="http://bugs.debian.org/cgi-bin/pkgreport.cgi?which=pkg&data=wnpp&archive=no&show_list_header=no&show_list_footer=no&version=&pend-inc=pending&pend-exc=forwarded&pend-exc=pending-fixed&pend-exc=fixed&pend-exc=done&exclude=wontfix"
+
+echo "<!DOCTYPE HTML PUBLIC '-//W3C//DTD HTML 4.01 Transitional//EN' 'http://www.w3.org/TR/html4/loose.dtd'>"
+echo "<html><head><title>WNPP bugs wrt lib*-perl</title></head>"
+echo "<body><h1>WNPP bugs wrt lib*-perl</h1><pre>"
+wget -q -O - "$URL" | \
+ sed -e '/H2.*Forwarded/,$ d' | \
+ html2text -nobs -width 255 | \
+ egrep -A 1 "\#.+lib.+-perl" | \
+ sed -e 's/_/ /g' -e 's/</\&lt;/g' -e 's/>/\&gt;/g' -e '/^--$/ d' | \
+ sed -e 's;#\([0-9]\+\);<a href="http://bugs.debian.org/\1">#\1</a>;g'
+echo "</pre></body></html>"
+
+exit 0
Property changes on: trunk/community/qa/wnppcheck
___________________________________________________________________
Name: svn:executable
+ *