[ecj] 21/145: Import Debian changes 3.2.1-2

Emmanuel Bourg ebourg-guest at moszumanska.debian.org
Tue Mar 27 23:11:24 BST 2018


This is an automated email from the git hooks/post-receive script.

ebourg-guest pushed a commit to branch master
in repository ecj.

commit 152875d6c0dc87ec3ddd5919169b117613dd8647
Author: Matthias Klose <doko at debian.org>
Date:   Sat Oct 7 23:38:47 2006 +0200

    Import Debian changes 3.2.1-2
    
    ecj-bootstrap (3.2.1-2) unstable; urgency=low
    
      * Add 1.6 compatibility in package description.
      * Use our own copy of aot-compile so that the build needs less
        memory on arm and m68k.
      * Build the standalone binary without -fjni -findirect-dispatch.
---
 debian/aot-compile     |  89 +++++++++++
 debian/aotcompile.py   | 423 +++++++++++++++++++++++++++++++++++++++++++++++++
 debian/changelog       |   9 ++
 debian/classfile.py    | 222 ++++++++++++++++++++++++++
 debian/control         |   6 +-
 debian/ecj-bootstrap.1 |   2 +
 debian/rules           | 104 ++++++------
 7 files changed, 797 insertions(+), 58 deletions(-)
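
debian/aot-compile and aotcompile.py below are a local copy of Gary Benson's
AOT-compile tooling (see their copyright headers).  A rough sketch of what the
wrapper ends up doing with the aotcompile module, using the same
srcdir/dstdir/libdir values that debian/rules passes further down (anything
else here is illustrative):

    import aotcompile

    # Compile every jarfile and class tree under build/dist into shared
    # libraries plus a gcj-dbtool class-map database, in place.
    compiler = aotcompile.Compiler("build/dist", "build/dist")
    compiler.libdir = "/usr/lib/gcj"   # same effect as the -L/--libdir option
    compiler.compile()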

diff --git a/debian/aot-compile b/debian/aot-compile
new file mode 100644
index 0000000..2caf8f4
--- /dev/null
+++ b/debian/aot-compile
@@ -0,0 +1,89 @@
+#! /usr/bin/python
+
+## Copyright (C) 2006 Red Hat, Inc.
+## Written by Gary Benson <gbenson at redhat.com>
+##
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2 of the License, or
+## (at your option) any later version.
+##
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+import aotcompile
+import getopt
+import os
+import sys
+
+usage = """\
+Usage: %s [OPTION...] SRCDIR DSTDIR
+AOT-compile all Java bytecode in SRCDIR into DSTDIR.
+Note that DSTDIR must be within SRCDIR.
+
+Options:
+  -M, --make=PATH        make executable to use (%s)
+  -C, --gcj=PATH         gcj executable to use (%s)
+  -D, --dbtool=PATH      gcj-dbtool executable to use (%s)
+  -m, --makeflags=FLAGS  flags to pass to make during build
+  -c, --gcjflags=FLAGS   flags to pass to gcj during compilation
+                           in addition to %s
+  -l, --ldflags=FLAGS    flags to pass to gcj during linking
+                           in addition to %s
+  -e, --exclude=PATH     do not compile PATH
+  -L, --libdir=DIR       override the destination directory
+
+Extra flags may also be passed using the AOT_MAKEFLAGS, AOT_GCJFLAGS
+and AOT_LDFLAGS environment variables.""" % (
+    os.path.basename(sys.argv[0]),
+    aotcompile.PATHS["make"],
+    aotcompile.PATHS["gcj"],
+    aotcompile.PATHS["dbtool"],
+    repr(" ".join(aotcompile.GCJFLAGS)),
+    repr(" ".join(aotcompile.LDFLAGS)))
+
+try:
+    if os.environ.has_key("RPM_PACKAGE_NAME"):
+        raise aotcompile.Error, "not for use within rpm specfiles"
+
+    try:
+        opts, args = getopt.getopt(
+            sys.argv[1:],
+            "C:c:l:D:e:L:",
+            ["gcj=", "dbtool=", "gcjflags=", "ldflags=", "exclude=", "libdir="])
+        srcdir, dstdir = args
+    except:
+        print >>sys.stderr, usage
+        sys.exit(1)
+
+    compiler = aotcompile.Compiler(srcdir, dstdir)
+    for o, a in opts:
+        if o in ("-M", "--make"):
+            aotcompile.PATHS["make"] = a
+        if o in ("-C", "--gcj"):
+            aotcompile.PATHS["gcj"] = a
+        if o in ("-D", "--dbtool"):
+            aotcompile.PATHS["dbtool"] = a
+        if o in ("-m", "--makeflags"):
+            compiler.makeflags[0:0] = a.split()
+        if o in ("-c", "--gcjflags"):
+            compiler.gcjflags[0:0] = a.split()
+        if o in ("-l", "--ldflags"):
+            compiler.ldflags[0:0] = a.split()
+        if o in ("-e", "--exclude"):
+            compiler.exclusions.append(a)
+        if o in ("-L", "--libdir"):
+            compiler.libdir = a
+    
+    compiler.makeflags[0:0] = os.environ.get("AOT_MAKEFLAGS", "").split() 
+    compiler.gcjflags[0:0] = os.environ.get("AOT_GCJFLAGS", "").split() 
+    compiler.ldflags[0:0] = os.environ.get("AOT_LDFLAGS", "").split() 
+
+    compiler.compile()
+
+except aotcompile.Error, e:
+    print >>sys.stderr, "%s: error: %s" % (
+        os.path.basename(sys.argv[0]), e)
+    sys.exit(1)
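
Extra flags can also reach the wrapper through the AOT_MAKEFLAGS, AOT_GCJFLAGS
and AOT_LDFLAGS environment variables, which are prepended to the compiler's
flag lists just before compile() runs; unset or empty variables are ignored.
A hypothetical caller, with illustrative flag values, might do:

    import os

    env = dict(os.environ)
    env["PYTHONPATH"] = "debian"     # so aotcompile/classfile are importable
    env["AOT_MAKEFLAGS"] = "-j1"
    env["AOT_GCJFLAGS"] = "-O1"
    os.spawnvpe(os.P_WAIT, "python",
                ["python", "debian/aot-compile",
                 "-L", "/usr/lib/gcj", "build/dist", "build/dist"], env)
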
diff --git a/debian/aotcompile.py b/debian/aotcompile.py
new file mode 100644
index 0000000..1793b79
--- /dev/null
+++ b/debian/aotcompile.py
@@ -0,0 +1,423 @@
+
+# -*- python -*-
+
+## Copyright (C) 2005, 2006 Red Hat, Inc.
+## Written by Gary Benson <gbenson at redhat.com>
+##
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2 of the License, or
+## (at your option) any later version.
+##
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+import classfile
+import copy
+import md5
+import operator
+import os
+import sys
+import cStringIO as StringIO
+import zipfile
+
+PATHS = {"make":   "/usr/bin/make",
+         "gcj":    "/usr/bin/gcj-4.1",
+         "dbtool": "/usr/bin/gcj-dbtool-4.1"}
+
+MAKEFLAGS = []
+GCJFLAGS = ["-g", "-O2", "-fPIC", "-findirect-dispatch", "-fjni"]
+LDFLAGS = ["-Wl,-Bsymbolic"]
+
+MAX_CLASSES_PER_JAR = 1024
+MAX_BYTES_PER_JAR = 1048576
+
+try:
+    for line in file('/proc/meminfo'):
+        if line.startswith('MemTotal:'):
+            memtotal = int(line.split()[1])
+        if memtotal < 270000:
+            MAX_CLASSES_PER_JAR = 512
+            MAX_BYTES_PER_JAR = 524288
+        if memtotal < 140000:
+            MAX_CLASSES_PER_JAR = 256
+            MAX_BYTES_PER_JAR = 262144
+except:
+    pass
+
+MAKEFILE = "Makefile"
+
+MAKEFILE_HEADER = '''\
+GCJ = %(gcj)s
+DBTOOL = %(dbtool)s
+GCJFLAGS = %(gcjflags)s
+LDFLAGS = %(ldflags)s
+
+%%.o: %%.jar
+	$(GCJ) -c $(GCJFLAGS) $< -o $@
+
+TARGETS = \\
+%(targets)s
+
+all: $(TARGETS)'''
+
+MAKEFILE_JOB = '''
+%(base)s_SOURCES = \\
+%(jars)s
+
+%(base)s_OBJECTS = \\
+$(%(base)s_SOURCES:.jar=.o)
+
+%(dso)s: $(%(base)s_OBJECTS)
+	$(GCJ) -shared $(GCJFLAGS) $(LDFLAGS) $^ -o $@
+
+%(db)s: $(%(base)s_SOURCES)
+	$(DBTOOL) -n $@ 64
+	for jar in $^; do \\
+            $(DBTOOL) -f $@ $$jar \\
+                %(libdir)s/%(dso)s; \\
+        done'''
+
+ZIPMAGIC, CLASSMAGIC = "PK\x03\x04", "\xca\xfe\xba\xbe"
+
+class Error(Exception):
+    pass
+
+class Compiler:
+    def __init__(self, srcdir, dstdir):
+        self.srcdir = os.path.abspath(srcdir)
+        self.dstdir = os.path.abspath(dstdir)
+        if not self.dstdir.startswith(self.srcdir):
+            raise Error, "dstdir must be within srcdir"
+        self.libdir = dstdir[len(self.srcdir):]
+
+        # Calling code may modify these parameters
+        self.gcjflags = copy.copy(GCJFLAGS)
+        self.ldflags = copy.copy(LDFLAGS)
+        self.makeflags = copy.copy(MAKEFLAGS)
+        self.exclusions = []
+
+    def compile(self):
+        """Search srcdir for classes and jarfiles, then generate
+        solibs and mappings databases for them all in libdir."""
+        if not os.path.isdir(self.dstdir):
+            os.makedirs(self.dstdir)
+        oldcwd = os.getcwd()
+        os.chdir(self.dstdir)
+        try:            
+            jobs = self.getJobList()
+            self.writeMakefile(MAKEFILE, jobs)
+            for job in jobs:
+                job.writeJars()
+            system([PATHS["make"]] + self.makeflags)
+            for job in jobs:
+                job.clean()
+            os.unlink(MAKEFILE)
+        finally:
+            os.chdir(oldcwd)
+
+    def getJobList(self):
+        """Return all jarfiles and class collections in srcdir."""
+        jobs = weed_jobs(find_jobs(self.srcdir, self.exclusions))
+        set_basenames(jobs)
+        return jobs
+
+    def writeMakefile(self, path, jobs):
+        """Generate a makefile to build the solibs and mappings
+        databases for the specified list of jobs."""
+        fp = open(path, "w")
+        print >>fp, MAKEFILE_HEADER % {
+            "gcj": PATHS["gcj"],
+            "dbtool": PATHS["dbtool"],
+            "gcjflags": " ".join(self.gcjflags),
+            "ldflags": " ".join(self.ldflags),
+            "targets": " \\\n".join(reduce(operator.add, [
+                (job.dsoName(), job.dbName()) for job in jobs]))}
+        for job in jobs:
+            values = job.ruleArguments()
+            values["libdir"] = self.libdir
+            print >>fp, MAKEFILE_JOB % values
+        fp.close()
+
+def find_jobs(dir, exclusions = ()):
+    """Scan a directory and find things to compile: jarfiles (zips,
+    wars, ears, rars, etc: we go by magic rather than file extension)
+    and directories of classes."""
+    def visit((classes, zips), dir, items):
+        for item in items:
+            path = os.path.join(dir, item)
+            if os.path.islink(path) or not os.path.isfile(path):
+                continue
+            magic = open(path, "r").read(4)
+            if magic == ZIPMAGIC:
+                zips.append(path)
+            elif magic == CLASSMAGIC:
+                classes.append(path)
+    classes, paths = [], []
+    os.path.walk(dir, visit, (classes, paths))
+    # Convert the list of classes into a list of directories
+    while classes:
+        # XXX this requires the class to be correctly located in its hierarchy.
+        path = classes[0][:-len(os.sep + classname(classes[0]) + ".class")]
+        paths.append(path)
+        classes = [cls for cls in classes if not cls.startswith(path)]
+    # Handle exclusions.  We're really strict about them because the
+    # option is temporary in aot-compile-rpm and dead options left in
+    # specfiles will hinder its removal.
+    for path in exclusions:
+        if path in paths:
+            paths.remove(path)
+        else:
+            raise Error, "%s: path does not exist or is not a job" % path
+    # Build the list of jobs
+    jobs = []
+    paths.sort()
+    for path in paths:
+        if os.path.isfile(path):
+            job = JarJob(path)
+        else:
+            job = DirJob(path)
+        if len(job.classes):
+            jobs.append(job)
+    return jobs
+
+class Job:
+    """A collection of classes that will be compiled as a unit."""
+    
+    def __init__(self, path):
+        self.path, self.classes, self.blocks = path, {}, None
+
+    def addClass(self, bytes):
+        """Subclasses call this from their __init__ method for
+        every class they find."""
+        self.classes[md5.new(bytes).digest()] = bytes
+
+    def __makeBlocks(self):
+        """Split self.classes into chunks that can be compiled to
+        native code by gcj.  In the majority of cases this is not
+        necessary -- the job will have come from a jarfile which will
+        be equivalent to the one we generate -- but this only happens
+        _if_ the job was a jarfile and _if_ the jarfile isn't too big
+        and _if_ the jarfile has the correct extension and _if_ all
+        classes are correctly named and _if_ the jarfile has no
+        embedded jarfiles.  Fitting a special case around all these
+        conditions is tricky to say the least.
+
+        Note that this could be called at the end of each subclass's
+        __init__ method.  The reason this is not done is because we
+        need to parse every class file.  This is slow, and unnecessary
+        if the job is subsetted."""
+        names = {}
+        for hash, bytes in self.classes.items():
+            name = classname(bytes)
+            if not names.has_key(name):
+                names[name] = []
+            names[name].append(hash)
+        names = names.items()
+        # We have to sort somehow, or the jars we generate would depend
+        # on hash ordering.  We sort by name in a simplistic attempt to keep
+        # related classes together so inter-class optimisation can happen.
+        names.sort()
+        self.blocks, bytes = [[]], 0
+        for name, hashes in names:
+            for hash in hashes:
+                if len(self.blocks[-1]) >= MAX_CLASSES_PER_JAR \
+                   or bytes >= MAX_BYTES_PER_JAR:
+                    self.blocks.append([])
+                    bytes = 0
+                self.blocks[-1].append((name, hash))
+                bytes += len(self.classes[hash])
+
+    # From Archit Shah:
+    #   The implementation and the documentation don't seem to match.
+    #  
+    #    [a, b].isSubsetOf([a]) => True
+    #  
+    #   Identical copies of all classes in this collection do not
+    #   exist in the other. I think the method should be named isSupersetOf
+    #   and the documentation should swap uses of "this" and "other".
+    #
+    # XXX think about this when I've had more sleep...
+    def isSubsetOf(self, other):
+        """Returns True if identical copies of all classes in this
+        collection exist in the other."""
+        for item in other.classes.keys():
+            if not self.classes.has_key(item):
+                return False
+        return True
+
+    def __targetName(self, ext):
+        return self.basename + ext
+
+    def tempJarName(self, num):
+        return self.__targetName(".%d.jar" % (num + 1))
+
+    def tempObjName(self, num):
+        return self.__targetName(".%d.o" % (num + 1))
+
+    def dsoName(self):
+        """Return the filename of the shared library that will be
+        built from this job."""
+        return self.__targetName(".so")
+
+    def dbName(self):
+        """Return the filename of the mapping database that will be
+        built from this job."""
+        return self.__targetName(".db")
+
+    def ruleArguments(self):
+        """Return a dictionary of values that when substituted
+        into MAKEFILE_JOB will create the rules required to build
+        the shared library and mapping database for this job."""
+        if self.blocks is None:
+            self.__makeBlocks()
+        return {
+            "base": "".join(
+                [c.isalnum() and c or "_" for c in self.dsoName()]),
+            "jars": " \\\n".join(
+                [self.tempJarName(i) for i in xrange(len(self.blocks))]),
+            "dso": self.dsoName(),
+            "db": self.dbName()}
+
+    def writeJars(self):
+        """Generate jarfiles that can be native compiled by gcj."""
+        if self.blocks is None:
+            self.__makeBlocks()
+        for block, i in zip(self.blocks, xrange(len(self.blocks))):
+            jar = zipfile.ZipFile(self.tempJarName(i), "w", zipfile.ZIP_STORED)
+            for name, hash in block:
+                jar.writestr(
+                    zipfile.ZipInfo("%s.class" % name), self.classes[hash])
+            jar.close()
+
+    def clean(self):
+        """Delete all temporary files created during this job's build."""
+        if self.blocks is None:
+            self.__makeBlocks()
+        for i in xrange(len(self.blocks)):
+            os.unlink(self.tempJarName(i))
+            os.unlink(self.tempObjName(i))
+
+class JarJob(Job):
+    """A Job whose origin was a jarfile."""
+
+    def __init__(self, path):
+        Job.__init__(self, path)
+        self._walk(zipfile.ZipFile(path, "r"))
+
+    def _walk(self, zf):
+        for name in zf.namelist():
+            bytes = zf.read(name)
+            if bytes.startswith(ZIPMAGIC):
+                self._walk(zipfile.ZipFile(StringIO.StringIO(bytes)))
+            elif bytes.startswith(CLASSMAGIC):
+                self.addClass(bytes)
+
+class DirJob(Job):
+    """A Job whose origin was a directory of classfiles."""
+
+    def __init__(self, path):
+        Job.__init__(self, path)
+        os.path.walk(path, DirJob._visit, self)
+
+    def _visit(self, dir, items):
+        for item in items:
+            path = os.path.join(dir, item)
+            if os.path.islink(path) or not os.path.isfile(path):
+                continue
+            fp = open(path, "r")
+            magic = fp.read(4)
+            if magic == CLASSMAGIC:
+                self.addClass(magic + fp.read())
+    
+def weed_jobs(jobs):
+    """Remove any jarfiles that are completely contained within
+    another.  This is more common than you'd think, and we only
+    need one nativified copy of each class after all."""
+    jobs = copy.copy(jobs)
+    while True:
+        for job1 in jobs:
+            for job2 in jobs:
+                if job1 is job2:
+                    continue
+                if job1.isSubsetOf(job2):
+                    msg = "subsetted %s" % job2.path
+                    if job2.isSubsetOf(job1):
+                        if (isinstance(job1, DirJob) and
+                            isinstance(job2, JarJob)):
+                            # In the braindead case where a package
+                            # contains an expanded copy of a jarfile
+                            # the jarfile takes precedence.
+                            continue
+                        msg += " (identical)"
+                    warn(msg)
+                    jobs.remove(job2)
+                    break
+            else:
+                continue
+            break
+        else:
+            break
+        continue
+    return jobs
+
+def set_basenames(jobs):
+    """Ensure that each jarfile has a different basename."""
+    names = {}
+    for job in jobs:
+        name = os.path.basename(job.path)
+        if not names.has_key(name):
+            names[name] = []
+        names[name].append(job)
+    for name, set in names.items():
+        if len(set) == 1:
+            set[0].basename = name
+            continue
+        # prefix the jar filenames to make them unique
+        # XXX will not work in most cases -- needs generalising
+        set = [(job.path.split(os.sep), job) for job in set]
+        minlen = min([len(bits) for bits, job in set])
+        set = [(bits[-minlen:], job) for bits, job in set]
+        bits = apply(zip, [bits for bits, job in set])
+        while True:
+            row = bits[-2]
+            for bit in row[1:]:
+                if bit != row[0]:
+                    break
+            else:
+                del bits[-2]
+                continue
+            break
+        set = zip(
+            ["_".join(name) for name in apply(zip, bits[-2:])],
+            [job for bits, job in set])
+        for name, job in set:
+            warn("building %s as %s" % (job.path, name))
+            job.basename = name
+    # XXX keep this check until we're properly general
+    names = {}
+    for job in jobs:
+        name = job.basename
+        if names.has_key(name):
+            raise Error, "%s: duplicate jobname" % name
+        names[name] = 1
+
+def system(command):
+    """Execute a command."""
+    status = os.spawnv(os.P_WAIT, command[0], command)
+    if status > 0:
+        raise Error, "%s exited with code %d" % (command[0], status)
+    elif status < 0:
+        raise Error, "%s killed by signal %d" % (command[0], -status)
+
+def warn(msg):
+    """Print a warning message."""
+    print >>sys.stderr, "%s: warning: %s" % (
+        os.path.basename(sys.argv[0]), msg)
+
+def classname(bytes):
+    """Extract the class name from the bytes of a class file."""
+    klass = classfile.Class(bytes)
+    return klass.constants[klass.constants[klass.name][1]][1]
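
Part of the "less memory" goal is served by the /proc/meminfo probe near the
top of aotcompile.py: on machines with little RAM the per-jar limits are
lowered, so gcj compiles smaller chunks at a time.  Restated as a stand-alone
sketch (same thresholds as above; the MemTotal value is in kB, so the cutoffs
are roughly 256 MB and 128 MB):

    MAX_CLASSES_PER_JAR, MAX_BYTES_PER_JAR = 1024, 1048576

    memtotal = None
    for line in open("/proc/meminfo"):
        if line.startswith("MemTotal:"):
            memtotal = int(line.split()[1])   # kB
            break
    if memtotal is not None and memtotal < 270000:
        MAX_CLASSES_PER_JAR, MAX_BYTES_PER_JAR = 512, 524288
    if memtotal is not None and memtotal < 140000:
        MAX_CLASSES_PER_JAR, MAX_BYTES_PER_JAR = 256, 262144
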
diff --git a/debian/changelog b/debian/changelog
index 177e440..636a628 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,12 @@
+ecj-bootstrap (3.2.1-2) unstable; urgency=low
+
+  * Add 1.6 compatibility in package description.
+  * Use our own copy of aot-compile so that the build needs less
+    memory on arm and m68k.
+  * Build the standalone binary without -fjni -findirect-dispatch.
+
+ -- Matthias Klose <doko at debian.org>  Sat,  7 Oct 2006 23:38:47 +0200
+
 ecj-bootstrap (3.2.1-1) unstable; urgency=medium
 
   * New upstream version.
diff --git a/debian/classfile.py b/debian/classfile.py
new file mode 100644
index 0000000..d7e7d7e
--- /dev/null
+++ b/debian/classfile.py
@@ -0,0 +1,222 @@
+
+## Copyright (C) 2004, 2005 Red Hat, Inc.
+## Written by Gary Benson <gbenson at redhat.com>
+##
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2 of the License, or
+## (at your option) any later version.
+##
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+## GNU General Public License for more details.
+
+"""Read Java(TM) class files."""
+
+import cStringIO as StringIO
+import struct
+
+class Class:
+    def __init__(self, arg):
+        if hasattr(arg, "read"):
+            self.fp = arg
+        elif type(arg) == type(""):
+            if arg.startswith("\xca\xfe\xba\xbe"):
+                self.fp = StringIO.StringIO(arg)
+            else:
+                self.fp = open(arg, "r")
+        else:
+            raise TypeError, type(arg)
+
+        magic = self._read_int()
+        assert magic == 0xcafebabeL
+        minor, major = self._read(">HH")        
+        self.version = (major, minor)
+
+        self.pool_integrity_checks = None
+        try:
+            assert False
+        except AssertionError:
+            self.pool_integrity_checks = []
+
+        self._read_constants_pool()
+
+        self.access_flags = self._read_short()
+        self.name = self._read_reference_Class()
+        self.super = self._read_reference_Class()
+
+        self.interfaces = self._read_interfaces()
+        self.fields = self._read_fieldsormethods()
+        self.methods = self._read_fieldsormethods()
+        self.attributes = self._read_attributes()
+
+        if self.pool_integrity_checks is not None:
+            for index, tag in self.pool_integrity_checks:
+                assert self.constants[index][0] == tag
+
+        del self.fp, self.pool_integrity_checks
+
+    def __repr__(self):
+        result = []
+        attrs = [attr for attr in dir(self)
+                 if not attr.startswith("_") and attr != "Member"]
+        attrs.sort()
+        for attr in attrs:
+            result.append("%-13s %s" % (
+                attr + ":", attr == "constants" and
+                "<ELIDED>" or repr(getattr(self, attr))))
+        return "\n".join(result)
+
+    def _read_constants_pool(self):
+        self.constants = {}
+        skip = False
+        for i in xrange(1, self._read_short()):
+            if skip:
+                skip = False
+                continue
+            tag = {
+                1: "Utf8", 3: "Integer", 4: "Float", 5: "Long",
+                6: "Double", 7: "Class", 8: "String", 9: "Fieldref",
+                10: "Methodref", 11: "InterfaceMethodref",
+                12: "NameAndType"}[self._read_byte()]
+            skip = tag in ("Long", "Double") # crack crack crack!
+            self.constants[i] = (tag, getattr(self, "_read_constant_" + tag)())
+
+    def _read_interfaces(self):
+        result = []
+        for i in xrange(self._read_short()):
+            result.append(self._read_reference_Class())
+        return result
+
+    def _read_fieldsormethods(self):
+        result = []
+        for i in xrange(self._read_short()):
+            result.append(self.Member(self))
+        return result
+
+    class Member:
+        def __init__(self, source):
+            self.access_flags = source._read_short()
+            self.name = source._read_reference_Utf8()
+            self.descriptor = source._read_reference_Utf8()
+            self.attributes = source._read_attributes()
+
+        def __repr__(self):
+            result = []
+            attrs = [attr for attr in dir(self) if not attr.startswith("_")]
+            attrs.sort()
+            for attr in attrs:
+                value = getattr(self, attr)
+                if attr == "attributes" and value.has_key("Code"):
+                    value = value.copy()
+                    value.update({"Code": "<ELIDED>"})
+                result.append("%-13s %s" % (
+                    attr + ":", repr(value).replace(
+                        "'Code': '<ELIDED>'", "'Code': <ELIDED>")))
+            return ("\n%s" % (15 * " ")).join(result)
+
+    def _read_attributes(self):
+        result = {}
+        for i in xrange(self._read_short()):
+            name = self._read_reference_Utf8()
+            data = self.fp.read(self._read_int())
+            assert not result.has_key(name)
+            result[name] = data
+        return result
+
+    # Constants pool reference reader convenience functions
+
+    def _read_reference_Utf8(self):
+        return self._read_references("Utf8")[0]
+
+    def _read_reference_Class(self):
+        return self._read_references("Class")[0]
+
+    def _read_reference_Class_NameAndType(self):
+        return self._read_references("Class", "NameAndType")
+
+    def _read_references(self, *args):
+        result = []
+        for arg in args:
+            index = self._read_short()
+            if self.pool_integrity_checks is not None:
+                self.pool_integrity_checks.append((index, arg))
+            result.append(index)
+        return result
+
+    # Constants pool constant reader functions
+
+    def _read_constant_Utf8(self):
+        constant = self.fp.read(self._read_short())
+        try:
+            constant = constant.decode("utf-8")
+        except UnicodeError:
+            constant = _bork_utf8_decode(constant)
+        try:
+            constant = constant.encode("us-ascii")
+        except UnicodeError:
+            pass
+        return constant
+
+    def _read_constant_Integer(self):
+        return self._read_int()
+
+    def _read_constant_Float(self):
+        return self._read(">f")[0]
+
+    def _read_constant_Long(self):
+        return self._read(">q")[0]
+
+    def _read_constant_Double(self):
+        return self._read(">d")[0]
+
+    _read_constant_Class = _read_reference_Utf8
+    _read_constant_String = _read_reference_Utf8
+    _read_constant_Fieldref = _read_reference_Class_NameAndType
+    _read_constant_Methodref = _read_reference_Class_NameAndType
+    _read_constant_InterfaceMethodref = _read_reference_Class_NameAndType
+
+    def _read_constant_NameAndType(self):
+        return self._read_reference_Utf8(), self._read_reference_Utf8()
+
+    # Generic reader functions
+
+    def _read_int(self):
+        # XXX how else to read 32 bits on a 64-bit box?
+        h, l = map(long, self._read(">HH"))
+        return (h << 16) + l
+
+    def _read_short(self):
+        return self._read(">H")[0]
+
+    def _read_byte(self):
+        return self._read("B")[0]
+
+    def _read(self, fmt):
+        return struct.unpack(fmt, self.fp.read(struct.calcsize(fmt)))
+
+def _bork_utf8_decode(data):
+    # more crack!
+    bytes, unicode = map(ord, data), ""
+    while bytes:
+        b1 = bytes.pop(0)
+        if b1 & 0x80:
+            assert b1 & 0x40
+            b2 = bytes.pop(0)
+            assert b2 & 0xC0 == 0x80
+            if b1 & 0x20:
+                assert not b1 & 0x10
+                b3 = bytes.pop(0)
+                assert b3 & 0xC0 == 0x80
+                unicode += unichr(
+                    ((b1 & 0x0f) << 12) + ((b2 & 0x3f) << 6) + (b3 & 0x3f))
+            else:
+                unicode += unichr(((b1 & 0x1f) << 6) + (b2 & 0x3f))
+        else:
+            unicode += unichr(b1)
+    return unicode
+
+if __name__ == "__main__":
+    print Class("/usr/share/katana/build/ListDependentClasses.class")
+
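
classfile.Class exposes the constant pool as a dict mapping pool index to a
(tag, value) pair, which is all aotcompile.classname() needs: a Class entry
points at a Utf8 entry holding the internal class name.  A minimal sketch of
that double lookup (the path is illustrative):

    import classfile

    klass = classfile.Class("Foo.class")           # a path or raw bytes
    tag, utf8_index = klass.constants[klass.name]  # tag is "Class"
    print klass.constants[utf8_index][1]           # e.g. "com/example/Foo"
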
diff --git a/debian/control b/debian/control
index 16e2b6a..ccc857e 100644
--- a/debian/control
+++ b/debian/control
@@ -3,7 +3,7 @@ Section: devel
 Priority: optional
 Maintainer: Debian Java Maintainers <pkg-java-maintainers at lists.alioth.debian.org>
 Uploaders: Jerry Haltom <wasabi at larvalstage.net>, Michael Koch <konqueror at gmx.de>, Matthias Klose <doko at debian.org>
-Build-Depends: cdbs (>= 0.4.26), debhelper (>= 4.1.0), gcj-4.1 (>= 4.1.1-13), fastjar, ant, dpkg (>= 1.13.19)
+Build-Depends: cdbs (>= 0.4.26), debhelper (>= 4.1.0), gcj-4.1 (>= 4.1.1-13), fastjar, ant, zip, dpkg (>= 1.13.19), python
 Standards-Version: 3.7.2
 
 Package: ecj-bootstrap
@@ -15,7 +15,7 @@ Provides: ecj
 Description: bootstrap version of the Eclipse Java compiler
  This package provides a bootstrap version of the Eclipse JDT compiler which is
  distributed as part of Eclipse. It passes the JCK (Java Compatibility Kit) and
- is compatible with Java 1.3, 1.4 and 1.5.
+ is compatible with Java 1.3, 1.4, 1.5 and 1.6.
  .
  This bootstrap version should be used for uploading initial versions of
  packages which are a dependency of Eclipse and build with ECJ.
@@ -27,7 +27,7 @@ Recommends: java-gcj-compat (>= 1.0.63)
 Description: bootstrap version of the Eclipse Java compiler (native version)
  This package provides a bootstrap version of the Eclipse JDT compiler which is
  distributed as part of Eclipse. It passes the JCK (Java Compatibility Kit) and
- is compatible with Java 1.3, 1.4 and 1.5.
+ is compatible with Java 1.3, 1.4, 1.5 and 1.6.
  .
  This bootstrap version should be used for uploading initial versions of
  packages which are a dependency of Eclipse and build with ECJ.
diff --git a/debian/ecj-bootstrap.1 b/debian/ecj-bootstrap.1
index cb3fe33..68d6c57 100644
--- a/debian/ecj-bootstrap.1
+++ b/debian/ecj-bootstrap.1
@@ -30,6 +30,8 @@ no classfile is generated
 .TP
 \fB\-1.5\fR                    set compliance level to 1.5
 .TP
+\fB\-1.6\fR                    set compliance level to 1.6
+.TP
 \fB\-source\fR <ver>           assertions toggle (1.3 or 1.4, default is 1.3 in -1.3 mode and 1.4 in -1.4 mode)
 .TP
 \fB\-nowarn\fR                 no warning (equivalent to '\-warn:none')
diff --git a/debian/rules b/debian/rules
index 2b880b7..8dd457a 100755
--- a/debian/rules
+++ b/debian/rules
@@ -8,20 +8,20 @@ ant_version = 1.6
 ant_version =
 GCJ = gcj-$(gcc_version)
 GIJ = gij-$(gcc_version)
+GCJDBTOOL = gcj-dbtool-$(gcc_version)
 
 DEB_HOST_ARCH ?= $(shell dpkg-architecture -qDEB_HOST_ARCH)
 
 with_native := yes
 with_rebuild := yes
 
-ifneq (,$(findstring $(DEB_HOST_ARCH), arm m68k))
+ifneq (,$(findstring $(DEB_HOST_ARCH), arm))
   with_rebuild := no
 endif
 
 default: build
 
 eclipse_root = ../eclipse-3.1.2/source-tree
-eclipse_root = ../321
 
 get-source:
 	test -d $(eclipse_root)
@@ -37,31 +37,28 @@ get-source:
 	cp -a $(eclipse_root)/plugins/org.eclipse.jdt.core/antadapter/org/eclipse/jdt/internal/antadapter \
 		src/org.eclipse.jdt.core/org/eclipse/jdt/internal/
 
-build/ecj-bootstrap:: build/stamp
-build/stamp:
+
+build/ecj-bootstrap:: build/stamp-bytecode build/stamp-nativecode
+
+build/stamp-bytecode:
+	rm -rf build/bin
 	mkdir -p build/bin
 
 	cp -r src/org.eclipse.jdt.core/org build/bin/
-	echo "byte-compiling (using gcj) ..."
+	find build/bin -name '*.java' > build/sourcefiles
+	split -l 25 build/sourcefiles ecj-sources.
+	mv ecj-sources.* build/bin
+
 	set -e; \
-	for f in $$(find build/bin -name '*.java'); do \
+	for list in $$(find build/bin -name 'ecj-sources.*'); do \
+	    echo "building files in $$list ..."; \
 	    $(GCJ) -d build/bin -C -g \
-	        -I/usr/share/ant$(ant_version)/lib/ant.jar \
+		-I/usr/share/ant$(ant_version)/lib/ant.jar \
 		-Ibuild/bin \
-	        $$f; \
+		@$$list; \
 	done
 
-	set -e; \
-	rv=0; \
-	for f in $$(find build/bin -name '*.java'); do \
-	  f2=$${f%*.java}.class; \
-	  if [ ! -f $$f2 ]; then \
-	    echo not found: $$f2; \
-	    rv=1; \
-	  fi; \
-	done; \
-	exit $$rv
-
+	find build/bin -name 'sources.list' -exec rm -f {} \;
 	find build/bin -name '*.java' -exec rm -f {} \;
 	find build/bin -name '*.html' -exec rm -f {} \;
 
@@ -73,7 +70,7 @@ ifeq ($(with_rebuild),yes)
 	mkdir -p build/bin
 	cp -r src/org.eclipse.jdt.core/org build/bin/
 
-	$(GIJ) \
+	time $(GIJ) \
 	    -classpath build/bootstrap/ecj.jar:/usr/share/ant$(ant_version)/lib/ant.jar \
 	    org.eclipse.jdt.internal.compiler.batch.Main \
 	    -bootclasspath /usr/share/java/libgcj-$(gcc_version).jar \
@@ -88,66 +85,63 @@ else
 	mkdir -p build/dist
 	cp -p build/bootstrap/ecj.jar build/dist/ecj.jar
 endif
-	rm -rf build/bin
+	cp build/dist/ecj.jar build/dist/ecj-standalone.jar
+	zip -d build/dist/ecj-standalone.jar \
+		'org/eclipse/jdt/core/JDTCompilerAdapter*'
+
+	touch build/stamp-bytecode
 
+build/stamp-nativecode: build/stamp-bytecode
+	find build/dist -name '*.jar.*.jar' | xargs -r rm -f
 ifeq ($(with_native),yes)
+	: # ecj.jar.so
+  ifneq (,$(filter $(DEB_HOST_ARCH), hppa))
 	$(GCJ) \
 	    -O2 -g -Wl,-Bsymbolic,-O1 -shared -fPIC -fjni -findirect-dispatch \
 	    -o build/dist/ecj.jar.so build/dist/ecj.jar
-
-	$(GCJ) \
-	    -O2 -g -Wl,-Bsymbolic,-O1 -fPIC -fjni -findirect-dispatch \
+	$(GCJDBTOOL) -f build/dist/ecj-bootstrap.db \
+	    build/dist/ecj.jar.so /usr/lib/gcj/ecj.jar.so \
+		|| touch build/dist/ecj-bootstrap.db
+  else
+	PYTHONPATH=$(CURDIR)/debian time python debian/aot-compile \
+	    -L /usr/lib/gcj build/dist build/dist
+	mv build/dist/ecj.jar.db build/dist/ecj-bootstrap.db
+  endif
+
+	: # ecj-bootstrap-gcj
+	time $(GCJ) \
+	    -O2 -g -Wl,-O1 \
 	    --main=org.eclipse.jdt.internal.compiler.batch.Main \
-	    -o build/dist/ecj-bootstrap-gcj build/dist/ecj.jar
+	    -o build/dist/ecj-bootstrap-gcj build/dist/ecj-standalone.jar
 endif
 
-	touch build/stamp
+	touch build/stamp-nativecode
 
-xxx:
-	$(GCJ) -v \
-	    -O2 -g -Wl,-Bsymbolic -shared -fPIC -fjni -findirect-dispatch \
-	    -o /tmp/ecj.jar.so /usr/share/java/ecj.jar
 
-	$(GCJ) -v \
-	    -O2 -g -Wl,-Bsymbolic -fPIC -fjni -findirect-dispatch \
-	    --main=org.eclipse.jdt.internal.compiler.batch.Main \
-	    -o /tmp/ecj /usr/share/java/ecj.jar
-	objdump -x /tmp/ecj.jar.so | grep NEEDED
-	objdump -x /tmp/ecj | grep NEEDED
-
-install-ecj-bootstrap:
-	# Install pieces into proper hierarchy.
+install/ecj-bootstrap::
 	mkdir -p debian/tmp/usr/share/java
 	install -m 644 build/dist/ecj.jar debian/tmp/usr/share/java/
 
 	mkdir -p debian/tmp/usr/bin
 	install -m 755 ecj-bootstrap debian/tmp/usr/bin/
 
+install/ecj-bootstrap-gcj::
 ifeq ($(with_native),yes)
 	mkdir -p debian/tmp/usr/lib/gcj
 	install -m 644 build/dist/ecj.jar.so debian/tmp/usr/lib/gcj
 
-	# Native GCJ version.
-	mkdir -p debian/tmp/usr/bin
-	install -m 755 build/dist/ecj-bootstrap-gcj debian/tmp/usr/bin/
-
 	mkdir -p debian/tmp/usr/share/gcj/classmap.d
-	gcj-dbtool-$(gcc_version) -n debian/tmp/usr/share/gcj/classmap.d/ecj-bootstrap.db
+	install -m 644 build/dist/ecj-bootstrap.db \
+		debian/tmp/usr/share/gcj/classmap.d/
 
-	cd debian/tmp && \
-	for jar in $$(find usr/share/java -name '*.jar'); do \
-		echo $$jar '->' usr/lib/gcj/$$(basename $$jar).so; \
-		gcj-dbtool-$(gcc_version) -f usr/share/gcj/classmap.d/ecj-bootstrap.db \
-			$$jar /usr/lib/gcj/$$(basename $$jar).so || exit 1; \
-	done
+	mkdir -p debian/tmp/usr/bin
+	install -m 755 build/dist/ecj-bootstrap-gcj debian/tmp/usr/bin/
 endif
-
-
-install/ecj-bootstrap:: install-ecj-bootstrap
-install/ecj-bootstrap-gcj:: install-ecj-bootstrap
 	mkdir -p debian/ecj-bootstrap-gcj/usr/share/man/man1
-	ln -sf ecj-bootstrap.1.gz debian/ecj-bootstrap-gcj/usr/share/man/man1/ecj-bootstrap-gcj.1.gz
+	ln -sf ecj-bootstrap.1.gz \
+	  debian/ecj-bootstrap-gcj/usr/share/man/man1/ecj-bootstrap-gcj.1.gz
 
 
 clean::
 	rm -rf build
+	rm -f debian/*.pyc

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-java/ecj.git


