[libfastutil-java] 04/06: Imported Upstream version 6.5.4

Andreas Tille tille at debian.org
Wed Aug 13 19:54:59 UTC 2014


This is an automated email from the git hooks/post-receive script.

tille pushed a commit to branch master
in repository libfastutil-java.

commit 741450807cb83a59f9248b50e39789331b590a61
Author: Andreas Tille <tille at debian.org>
Date:   Wed Aug 13 21:49:23 2014 +0200

    Imported Upstream version 6.5.4
---
 CHANGES                                            |  801 +++++
 LICENSE-2.0                                        |  202 ++
 README                                             |   44 +
 build.properties                                   |   20 +
 build.xml                                          |  227 ++
 debian/changelog                                   |    5 -
 debian/compat                                      |    1 -
 debian/control                                     |   57 -
 debian/copyright                                   |   44 -
 debian/libfastutil-java-doc.javadoc                |    1 -
 debian/libfastutil-java.jlibs                      |    1 -
 debian/rules                                       |   42 -
 debian/source/format                               |    1 -
 debian/watch                                       |    2 -
 drv/AVLTreeMap.drv                                 | 2746 ++++++++++++++++++
 drv/AVLTreeSet.drv                                 | 2165 ++++++++++++++
 drv/AbstractBidirectionalIterator.drv              |   54 +
 drv/AbstractBigList.drv                            |  651 +++++
 drv/AbstractBigListIterator.drv                    |   66 +
 drv/AbstractCollection.drv                         |  282 ++
 drv/AbstractComparator.drv                         |   39 +
 drv/AbstractFunction.drv                           |  114 +
 drv/AbstractIterator.drv                           |   58 +
 drv/AbstractList.drv                               |  642 ++++
 drv/AbstractListIterator.drv                       |   47 +
 drv/AbstractMap.drv                                |  305 ++
 drv/AbstractPriorityQueue.drv                      |   42 +
 drv/AbstractSet.drv                                |   82 +
 drv/AbstractSortedMap.drv                          |  174 ++
 drv/AbstractSortedSet.drv                          |   61 +
 drv/AbstractStack.drv                              |   72 +
 drv/ArrayFIFOQueue.drv                             |  206 ++
 drv/ArrayFrontCodedList.drv                        |  712 +++++
 drv/ArrayIndirectDoublePriorityQueue.drv           |  635 ++++
 drv/ArrayIndirectPriorityQueue.drv                 |  650 +++++
 drv/ArrayList.drv                                  | 1270 ++++++++
 drv/ArrayMap.drv                                   |  326 +++
 drv/ArrayPriorityQueue.drv                         |  210 ++
 drv/ArraySet.drv                                   |  186 ++
 drv/Arrays.drv                                     | 1729 +++++++++++
 drv/BidirectionalIterator.drv                      |   62 +
 drv/BigArrayBigList.drv                            | 1168 ++++++++
 drv/BigArrays.drv                                  | 1498 ++++++++++
 drv/BigList.drv                                    |  162 ++
 drv/BigListIterator.drv                            |   36 +
 drv/BigListIterators.drv                           |  176 ++
 drv/BigLists.drv                                   |  879 ++++++
 drv/BinIO.drv                                      |  166 ++
 drv/BinIOFragment.drv                              |  867 ++++++
 drv/Collection.drv                                 |  140 +
 drv/Collections.drv                                |  265 ++
 drv/Comparator.drv                                 |   43 +
 drv/Comparators.drv                                |   70 +
 drv/Function.drv                                   |  109 +
 drv/Functions.drv                                  |  243 ++
 drv/Hash.drv                                       |   52 +
 drv/HeapIndirectDoublePriorityQueue.drv            |  654 +++++
 drv/HeapIndirectPriorityQueue.drv                  |  674 +++++
 drv/HeapPriorityQueue.drv                          |  386 +++
 drv/HeapSemiIndirectPriorityQueue.drv              |  572 ++++
 drv/HeapSesquiIndirectDoublePriorityQueue.drv      |  667 +++++
 drv/Heaps.drv                                      |  118 +
 drv/IndirectDoublePriorityQueue.drv                |   37 +
 drv/IndirectHeaps.drv                              |  159 +
 drv/IndirectPriorityQueue.drv                      |   37 +
 drv/Iterable.drv                                   |   40 +
 drv/Iterator.drv                                   |   57 +
 drv/Iterators.drv                                  |  838 ++++++
 drv/LinkedOpenCustomDoubleHashMap.drv              |    1 +
 drv/LinkedOpenCustomDoubleHashSet.drv              |    1 +
 drv/LinkedOpenCustomHashMap.drv                    |    1 +
 drv/LinkedOpenCustomHashSet.drv                    |    1 +
 drv/LinkedOpenDoubleHashMap.drv                    |    1 +
 drv/LinkedOpenDoubleHashSet.drv                    |    1 +
 drv/LinkedOpenHashMap.drv                          |    1 +
 drv/LinkedOpenHashSet.drv                          |    1 +
 drv/List.drv                                       |  210 ++
 drv/ListIterator.drv                               |   40 +
 drv/Lists.drv                                      |  840 ++++++
 drv/Map.drv                                        |  138 +
 drv/Maps.drv                                       |  358 +++
 drv/OpenCustomDoubleHashMap.drv                    |    1 +
 drv/OpenCustomDoubleHashSet.drv                    |    1 +
 drv/OpenCustomHashMap.drv                          |    1 +
 drv/OpenCustomHashSet.drv                          |    1 +
 drv/OpenDoubleHashMap.drv                          | 2606 +++++++++++++++++
 drv/OpenDoubleHashSet.drv                          | 1986 +++++++++++++
 drv/OpenHashBigSet.drv                             | 1298 +++++++++
 drv/OpenHashMap.drv                                | 3061 ++++++++++++++++++++
 drv/OpenHashSet.drv                                | 2271 +++++++++++++++
 drv/PriorityQueue.drv                              |   70 +
 drv/PriorityQueues.drv                             |   81 +
 drv/RBTreeMap.drv                                  | 2687 +++++++++++++++++
 drv/RBTreeSet.drv                                  | 2137 ++++++++++++++
 drv/SemiIndirectHeaps.drv                          |  243 ++
 drv/Set.drv                                        |   50 +
 drv/Sets.drv                                       |  515 ++++
 drv/SortedMap.drv                                  |  157 +
 drv/SortedMaps.drv                                 |  887 ++++++
 drv/SortedSet.drv                                  |  151 +
 drv/SortedSets.drv                                 |  666 +++++
 drv/Stack.drv                                      |   50 +
 drv/StripedOpenHashMap.drv                         |  168 ++
 drv/TextIO.drv                                     |   74 +
 drv/TextIOFragment.drv                             |  406 +++
 gencsource.sh                                      |  614 ++++
 makefile                                           |  666 +++++
 pom.xml                                            |   30 +
 .../fastutil/AbstractIndirectPriorityQueue.java    |   41 +
 .../unimi/dsi/fastutil/AbstractPriorityQueue.java  |   35 +
 src/it/unimi/dsi/fastutil/AbstractStack.java       |   41 +
 src/it/unimi/dsi/fastutil/Arrays.java              |  330 +++
 .../unimi/dsi/fastutil/BidirectionalIterator.java  |   55 +
 src/it/unimi/dsi/fastutil/BigArrays.java           |  489 ++++
 src/it/unimi/dsi/fastutil/BigList.java             |  125 +
 src/it/unimi/dsi/fastutil/BigListIterator.java     |   62 +
 src/it/unimi/dsi/fastutil/BigSwapper.java          |   32 +
 src/it/unimi/dsi/fastutil/Function.java            |  101 +
 src/it/unimi/dsi/fastutil/Hash.java                |  173 ++
 src/it/unimi/dsi/fastutil/HashCommon.java          |  173 ++
 .../unimi/dsi/fastutil/IndirectPriorityQueue.java  |  161 +
 .../unimi/dsi/fastutil/IndirectPriorityQueues.java |  118 +
 src/it/unimi/dsi/fastutil/Maps.java                |   36 +
 src/it/unimi/dsi/fastutil/PriorityQueue.java       |  102 +
 src/it/unimi/dsi/fastutil/PriorityQueues.java      |  109 +
 src/it/unimi/dsi/fastutil/Size64.java              |   50 +
 src/it/unimi/dsi/fastutil/Stack.java               |   72 +
 src/it/unimi/dsi/fastutil/Swapper.java             |   31 +
 src/it/unimi/dsi/fastutil/booleans/package.html    |   16 +
 src/it/unimi/dsi/fastutil/bytes/package.html       |   12 +
 src/it/unimi/dsi/fastutil/chars/package.html       |   12 +
 src/it/unimi/dsi/fastutil/doubles/package.html     |   12 +
 src/it/unimi/dsi/fastutil/floats/package.html      |   12 +
 src/it/unimi/dsi/fastutil/ints/package.html        |   12 +
 .../dsi/fastutil/io/FastBufferedInputStream.java   |  551 ++++
 .../dsi/fastutil/io/FastBufferedOutputStream.java  |  217 ++
 .../dsi/fastutil/io/FastByteArrayInputStream.java  |  131 +
 .../dsi/fastutil/io/FastByteArrayOutputStream.java |  113 +
 .../fastutil/io/FastMultiByteArrayInputStream.java |  184 ++
 .../io/InspectableFileCachedInputStream.java       |  290 ++
 .../dsi/fastutil/io/MeasurableInputStream.java     |   33 +
 .../dsi/fastutil/io/MeasurableOutputStream.java    |   33 +
 src/it/unimi/dsi/fastutil/io/MeasurableStream.java |   56 +
 .../dsi/fastutil/io/RepositionableStream.java      |   45 +
 src/it/unimi/dsi/fastutil/io/package.html          |   19 +
 src/it/unimi/dsi/fastutil/longs/package.html       |   12 +
 src/it/unimi/dsi/fastutil/objects/package.html     |   20 +
 src/it/unimi/dsi/fastutil/shorts/package.html      |   12 +
 src/overview.html                                  |  878 ++++++
 test/it/unimi/dsi/fastutil/ArraysTest.java         |   87 +
 test/it/unimi/dsi/fastutil/BigArraysTest.java      |   84 +
 .../bytes/ByteArrayFrontCodedListTest.java         |  125 +
 .../unimi/dsi/fastutil/bytes/ByteArraysTest.java   |  141 +
 .../chars/CharArrayFrontCodedListTest.java         |  126 +
 .../unimi/dsi/fastutil/chars/CharArraysTest.java   |  141 +
 .../dsi/fastutil/doubles/DoubleArraysTest.java     |  540 ++++
 .../dsi/fastutil/doubles/DoubleBigArraysTest.java  |  289 ++
 .../unimi/dsi/fastutil/floats/FloatArraysTest.java |  210 ++
 .../dsi/fastutil/ints/Int2IntArrayMapTest.java     |  112 +
 .../ints/Int2IntLinkedOpenHashMapTest.java         |  495 ++++
 .../ints/Int2IntOpenCustomHashMapTest.java         |  287 ++
 .../dsi/fastutil/ints/Int2IntOpenHashMapTest.java  |  284 ++
 .../ints/Int2ObjectLinkedOpenHashMapTest.java      |  111 +
 .../fastutil/ints/Int2ObjectOpenHashMapTest.java   |  111 +
 .../dsi/fastutil/ints/IntArrayFIFOQueueTest.java   |  147 +
 .../fastutil/ints/IntArrayFrontCodedListTest.java  |  125 +
 .../ints/IntArrayIndirectPriorityQueueTest.java    |  344 +++
 .../fastutil/ints/IntArrayPriorityQueueTest.java   |  149 +
 .../unimi/dsi/fastutil/ints/IntArraySetTest.java   |   80 +
 test/it/unimi/dsi/fastutil/ints/IntArraysTest.java |  591 ++++
 .../dsi/fastutil/ints/IntBigArrayBigListTest.java  |  489 ++++
 .../unimi/dsi/fastutil/ints/IntBigArraysTest.java  |  232 ++
 .../ints/IntHeapSemiIndirectPriorityQueueTest.java |   73 +
 .../fastutil/ints/IntLinkedOpenHashSetTest.java    |  404 +++
 .../fastutil/ints/IntOpenCustomHashSetTest.java    |  291 ++
 .../dsi/fastutil/ints/IntOpenHashBigSetTest.java   |  321 ++
 .../dsi/fastutil/ints/IntOpenHashSetTest.java      |  315 ++
 .../fastutil/ints/IntSemiIndirectHeapsTest.java    |   46 +
 .../ints/StripedInt2IntOpenHashMapTest.java        |  224 ++
 test/it/unimi/dsi/fastutil/io/BinIOTest.java       |  194 ++
 .../fastutil/io/FastBufferedInputStreamTest.java   |  345 +++
 .../fastutil/io/FastBufferedOutputStreamTest.java  |   90 +
 .../fastutil/io/FastByteArrayOutputStreamTest.java |  100 +
 .../io/InspectableFileCachedInputStreamTest.java   |  216 ++
 test/it/unimi/dsi/fastutil/io/TestIOTest.java      |  165 ++
 .../longs/LongArrayFrontCodedListTest.java         |  125 +
 .../unimi/dsi/fastutil/longs/LongArraysTest.java   |  127 +
 .../objects/AbstractObject2IntFunctionTest.java    |   16 +
 .../objects/Object2IntOpenHashMapTest.java         |  201 ++
 .../objects/ObjectBigArrayBigListTest.java         |  512 ++++
 .../dsi/fastutil/objects/ObjectBigArraysTest.java  |  115 +
 .../fastutil/objects/ObjectOpenHashBigSetTest.java |  260 ++
 .../fastutil/objects/ObjectOpenHashSetTest.java    |  271 ++
 .../objects/Reference2ReferenceArrayMapTest.java   |   99 +
 .../fastutil/objects/ReferenceArraySetTest.java    |   87 +
 .../shorts/ShortArrayFrontCodedListTest.java       |  125 +
 .../unimi/dsi/fastutil/shorts/ShortArraysTest.java |  141 +
 197 files changed, 62634 insertions(+), 154 deletions(-)

diff --git a/CHANGES b/CHANGES
new file mode 100644
index 0000000..78dadd4
--- /dev/null
+++ b/CHANGES
@@ -0,0 +1,801 @@
+6.5.4
+
+- Further fixes related to NaNs in sorting.
+
+- Fixed very old bug in FastByteArrayOutputStream.write(int).
+  Thanks to Massimo Santini for reporting this bug.
+
+- We now use Arrays.MAX_ARRAY_SIZE, which is equal to Integer.MAX_VALUE
+  minus 8, to bound all array allocations. Previously, it might happen
+  that grow() and other array-related functions could try to allocate an
+  array of size Integer.MAX_VALUE, which is technically correct from the
+  JLS, but will not work on most JVMs. The maximum length we use now is
+  the same value as that used by java.util.ArrayList. Thanks to William
+  Harvey for suggesting this change.
+
+6.5.3
+
+- Corrected erroneous introduction of compare() methods on integral
+  classes (they appeared in Java 7).
+
+6.5.2
+
+- A few changes were necessary to make fastutil behave as Java on NaNs
+  when sorting. Double.compareTo() and Float.compareTo() treat Double.NaN
+  as greater than Double.POSITIVE_INFINITY, and fastutil was not doing it.
+  As part of the change, now all comparisons between primitive types are
+  performed using the compare() method of the wrapper class
+  (microbenchmarks confirmed that there is no speed penalty for that,
+  probably due to inlining or even intrinsification). Thanks to Adam Klein
+  for reporting this bug.
+
+- All quickSort() implementations that do not involve a comparator are now
+  deprecated, as there are equivalent/better versions in java.util.Arrays.
+
+6.5.0 -> 6.5.1
+
+- Now FastBuffered{Input/Output}Stream has a constructor with an
+  explicitly given buffer.
+
+- Abandoned golden-ratio based expansion of arrays and lists in favour of
+  a (more standard) doubling approach.
+
+- Array-based FIFO queues now reduce their capacity automatically by
+  halving when the size becomes one fourth of the length.
+
+- The add() method for open hash maps has been deprecated and replaced by
+  addTo(), as the name choice proved to be a recipe for disaster.
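+
+  For instance, a minimal counting sketch using the new method (assuming the
+  type-specific class it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap):
+
+      Int2IntOpenHashMap counts = new Int2IntOpenHashMap();
+      counts.defaultReturnValue(0);  // missing keys count as zero
+      counts.addTo(42, 1);           // increment the counter for key 42
+      counts.addTo(42, 3);           // counts.get(42) is now 4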
+
+- New InspectableFileCachedInputStream for easily caching large byte
+  streams partially on file and partially in memory.
+
+- The front() method for semi-indirect heaps took no comparator, but
+  was used in queues in which you could support a comparator. There
+  is now a further version accepting a comparator.
+
+- Serial Version UIDs are now private.
+
+6.4.6 -> 6.5.0
+
+- Fixed type of array hash strategies.
+
+- Fixed use of equals() instead of compareTo() in
+  SemiIndirectHeaps.front(). Thanks to Matthew Hatem for reporting this
+  bug.
+
+- Now we generate custom hash maps for primitive types, too (as we were
+  already doing for sets).
+
+6.4.5 -> 6.4.6
+
+- In array-based priority queues changed() would not invalidate
+  the cached index of the smallest element.
+
+6.4.4 -> 6.4.5
+
+- In some very rare circumstances, enumeration of hash sets or maps
+  combined with massive element removal (using the iterator remove()
+  method) could have led to inconsistent enumeration (duplicates and
+  missing elements). Thanks to Hamish Morgan for reporting this bug.
+
+6.4.3 -> 6.4.4
+
+- Array-based maps were not implementing correctly entrySet().contains(),
+  and as a consequence equals() between such maps was broken. Thanks to
+  Benson Margulies for reporting this bug.
+
+6.4.2 -> 6.4.3
+
+- Now array-based priority queues cache their first element. Moreover,
+  they implement the correct type-specific interface.
+
+6.4.1 -> 6.4.2
+
+- Now we have indirect lexicographical radix sort on pairs of arrays,
+  mainly used to compute quickly Kendall's tau.
+
+- New reverse method for arrays (useful for radix descending sorts).
+
+- Radix sort (one or two arrays) for big arrays.
+
+- Now radix sort uses correctly (minimally) sized support arrays when
+  sorting subarrays.
+
+6.4 -> 6.4.1
+
+- Now we have a separate directory, settable in the makefile, to generate
+  sources. This makes Maven integration easier.
+
+- The store methods in TextIO for big arrays were broken.
+
+- Now big-array lists implement the Stack interface.
+
+- Fixed subtle bug in rehash() methods of big hash sets.
+
+6.3 -> 6.4
+
+- WARNING: Indirect queues must obviously have a way to determine whether
+  an index is in the queue. It was an oversight in the interface design
+  that a contains() method was not present. We took the risk of adding it
+  now. At the same time, we modified remove() so that it now returns a
+  boolean specifying whether the index to be removed was actually in the
+  queue, as this is more in line with the Java Collections Framework.
+
+- Removed unused double-priority queue related classes.
+
+- Now array-based sets and maps have a constructor based on
+  java.util.Collection and java.util.Map (as for the other
+  kind of sets and maps).
+
+- New doubly linked implementation for linked hash maps and sets. It uses
+  twice the space for pointers, but mixes well with linear probing, so we
+  have again constant-time true deletions. Moreover, iterators can be
+  started from any key in constant time (albeit the first access to the
+  index of the list iterator will require a linear scan, unless the
+  iterator started from the first or the last key). Additional methods
+  such as getAndMoveToFirst() make the creation of LRU caches very easy.
+  Thanks to Brien Colwell for donating the code.
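+
+  A minimal LRU-cache sketch (assuming the int-to-int linked map
+  it.unimi.dsi.fastutil.ints.Int2IntLinkedOpenHashMap; maxSize is just an
+  example parameter):
+
+      Int2IntLinkedOpenHashMap cache = new Int2IntLinkedOpenHashMap();
+      int maxSize = 3;
+      for (int k = 0; k < 5; k++) {
+          cache.putAndMoveToFirst(k, k * k);    // most recently used key goes first
+          if (cache.size() > maxSize)
+              cache.remove(cache.lastIntKey()); // evict the least recently used key
+      }
+      int v = cache.getAndMoveToFirst(3);       // a read also refreshes the position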
+
+- Now object-based array FIFO queues provide deque methods. Moreover,
+  they clean up the backing array after returning an object or when
+  performing a clear().
+
+- New get() method in set implementations makes it possible to recover
+  the actual object in the collection that is equal to the query key.
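+
+  A sketch of using get() to canonicalise instances (assuming an
+  ObjectOpenHashSet used as an interning pool):
+
+      ObjectOpenHashSet<String> pool = new ObjectOpenHashSet<String>();
+      String stored = new String("foo");
+      pool.add(stored);
+      // returns the instance actually kept in the set (stored), not the query object
+      String canonical = pool.get(new String("foo"));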
+
+- A number of bugs were found and fixed by Christian Falz (thanks!). In
+  all binary search code the "to" parameter was *inclusive*, but the
+  documentation said *exclusive*, with obvious problems. Hash map
+  iterators could return under some very subtle and almost irreproducible
+  circumstances a previously deleted slot. Deleted hash map entries would
+  return spurious null values.
+
+6.2.2 -> 6.3
+
+- We now have radix sort. It's much faster than quicksort, but it can
+  only sort keys in their natural order. There are multiple-array
+  and indirect (and possibly stable) versions available.
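+
+  For example (a sketch assuming the static container IntArrays and its
+  radixSort()/radixSortIndirect() methods):
+
+      int[] a = { 5, 1, 4, 1, 5, 9, 2, 6 };
+      IntArrays.radixSort(a);                     // sorts a in ascending natural order
+
+      int[] b = { 3, 1, 2 };
+      int[] perm = { 0, 1, 2 };
+      IntArrays.radixSortIndirect(perm, b, true); // b is untouched; perm becomes
+                                                  // { 1, 2, 0 } (indices of sorted b)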
+
+- There are now custom hash sets also for type-specific keys. This makes
+  it possible to use hash sets to index data indirectly (e.g., using
+  integer or long just as indices).
+
+- Shuffling static methods for all kinds of (big) list and arrays.
+
+6.2.1 -> 6.2.2
+
+- A new add() method makes the usage of maps as counters easier
+  and faster.
+
+6.2.0 -> 6.2.1
+
+- A very stupid bug was causing twice as much rehashing as was
+  necessary. Now insertions in hash-based classes are significantly faster.
+
+6.1.0 -> 6.2.0
+
+- A better structure of the scan loop for hash tables borrowed
+  from HPPC (http://labs.carrotsearch.com/hppc.html) gives some
+  speed improvement to hash-based classes.
+
+6.0.0 -> 6.1.0
+
+- Hash-based classes have been rewritten using linear probing and
+  a good hash (MurmurHash3). The old classes can still be generated
+  using the target oldsources.
+
+- Bizarre queues (double- and sesqui-indirect) have been removed
+  from the standard jar, but they can still be generated using the
+  target oldsources.
+
+5.1.5 -> 6.0.0
+
+- WARNING: the jar file is now fastutil.jar (not fastutil5.jar), again.
+
+- WARNING: now fastutil requires Java 6+.
+
+- fastutil is now released under the Apache License 2.0.
+
+- New framework for big arrays, represented as arrays-of-arrays.
+  BigArrays and the type-specific counterparts provide static
+  methods of all kinds.
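+
+  A sketch using the static container IntBigArrays (the length below is just
+  an example; big arrays may exceed Integer.MAX_VALUE entries given enough
+  heap):
+
+      long n = 1L << 32;                      // more than Integer.MAX_VALUE entries
+      int[][] big = IntBigArrays.newBigArray(n);
+      IntBigArrays.set(big, n - 1, 42);
+      int x = IntBigArrays.get(big, n - 1);   // x == 42
+      long length = IntBigArrays.length(big); // length == n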
+
+- New Size64 interface for classes implementing big collections.
+
+- New framework for big lists--lists with longs as indices. The only
+  present implementation uses big arrays, but, for instance,
+  Sux4J's succinct lists will be retrofitted to LongBigList
+  (presently they implement LongBigList from dsiutils, which will
+  be deprecated).
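+
+  A sketch, assuming the int implementation IntBigArrayBigList:
+
+      IntBigArrayBigList list = new IntBigArrayBigList();
+      list.add(1);
+      list.add(2);
+      long size = list.size64();   // 64-bit size, as per Size64
+      int first = list.getInt(0L); // positions are longs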
+
+- List.iterator() now returns a ListIterator. There is no real reason
+  not to do this, and the API change is handled from an implementation
+  viewpoint in AbstractList, so nobody should really notice.
+
+- New Collections.asCollection(Iterable) method to expose iterables as
+  collections (missing methods are computed using the iterator). This was
+  also the occasion to streamline type-specific abstract collections,
+  which now inherit from java.util.AbstractCollection, so we support
+  contains, clear, etc. methods as long as there is an iterator.
+
+- Fixed bugged array-based constructors of ArrayMap and ArraySet.
+
+- Fixed bugged put/remove methods in abstract functions. Thanks to
+  Katja Filippova for reporting this bug.
+
+- New front-coded lists use big arrays, so they can store much more
+  (in fact, unlimited) data. Unfortunately, they are no longer
+  serialisation-compatible with previous versions.
+
+- New MeasurableStream interface that is implemented by
+  MeasurableInputStream and by a new, analogous MeasurableOutputStream.
+
+- Better FastBufferedOutputStream and FastByteArrayOutputStream that
+  are measurable and positionable.
+
+- Now all clone() methods covariantly override the default return type
+  (Object).
+
+5.1.4 -> 5.1.5
+
+- ArraySet was implementing isEmpty() with inverted logic (thanks to
+  Marko Srdanovic for reporting this bug).
+
+- New constructor for FastMultiByteArrayInputStream: it takes a
+  MeasurableInputStream and uses length() to determine the number
+  of bytes to load into memory.
+
+5.1.3 -> 5.1.4
+
+- The implementation of RepositionableStream in FastByteArrayOutputStream
+  was fraught with a horrendous bug (thanks to Claudio Corsi for reporting),
+  in spite of extensive unit tests.
+
+5.1.2 -> 5.1.3
+
+- A bug existing since the first release was preventing tables
+  larger than 2^30 bits from working (the computation of the next bucket
+  to look at would cause an integer overflow).
+
+- FastByteArrayOutputStream now implements RepositionableStream.
+
+- Type-specific versions of Iterable.
+
+- Some methods (e.g., iterator() and values()) are now explicitly
+  re-strengthened wherever necessary to avoid complaints about
+  ambiguous method invocations by some compilers.
+
+- The introduction of functions added several bugs to the empty/singleton
+  map classes. Inheriting from the respective function counterparts left
+  several methods underspecified (equals(), etc.). This has been
+  (hopefully) fixed.
+
+5.1.1 -> 5.1.2
+
+- FastBufferedInputStream now supports length() by FileChannel-fetching
+  on FileInputStream instances (it already used to support position()
+  by the same mechanism).
+
+5.1.0 -> 5.1.1
+
+- Byte-array MG4J I/O classes have been moved here.
+
+5.0.9 -> 5.1.0
+
+- Fixed documentation for custom/noncustom maps (it was exchanged).
+
+- New type-specific entrySet() methods that avoid complicated casting
+  to get a type-specific entryset. Moreover, now entrySet() can
+  return an object implementing Fast(Sorted)EntrySet to indicate
+  that a fastIterator() method is available. Fast iterators can always
+  return the same Entry object, suitably mutated. We thank
+  Daniel Ramage for suggesting this feature.
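+
+  A sketch of fast iteration (assuming an Int2IntOpenHashMap, whose entry set
+  implements Int2IntMap.FastEntrySet):
+
+      Int2IntOpenHashMap m = new Int2IntOpenHashMap();
+      m.put(1, 10);
+      m.put(2, 20);
+      long sum = 0;
+      ObjectIterator<Int2IntMap.Entry> i =
+          ((Int2IntMap.FastEntrySet)m.int2IntEntrySet()).fastIterator();
+      while (i.hasNext()) {
+          Int2IntMap.Entry e = i.next(); // the same Entry instance, suitably mutated
+          sum += e.getIntKey() + e.getIntValue();
+      }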
+
+- Several hundred new classes generated by the new Function interface,
+  which represent mappings for which the entry set is not enumerable
+  (e.g., hashes). Functions have their usual share of satellite objects
+  (wrappers, etc.). There are no implementations--the main purpose of
+  the new interfaces is to make Sux4J (http://sux.dsi.unimi.it/) 
+  more object-oriented.
+
+5.0.8 -> 5.0.9
+
+- Slightly reduced overhead for bound checks in heap-based queues.
+
+- BinIO was loading byte arrays one byte at a time. Now some conditionally
+  compiled code uses bulk-read methods instead. Moreover, horrible kluges
+  to work around Java bug #6478546 have been included.
+
+5.0.7 -> 5.0.8
+
+- Faster array maps and sets: System.arraycopy() is very slow on small arrays
+  (due to inherent costs of calling native code) and reflection-based array
+  creation is a disaster. Now we use object arrays and loops.
+
+- New clone() methods for array-based structures and custom serialisation.
+
+- FastBuffered*Stream has been simplified and streamlined. No more block alignment.
+
+5.0.6 -> 5.0.7
+
+- Better algorithm for front() in heaps.
+
+- New comprehensive collection of array-based maps and sets. The motivation
+  behind such structures is the need for quick, low-footprint data
+  structures for *very* small sets (say, less than 10 elements). For
+  instance, in MG4J we were using sparse reference-based hash tables, but
+  it turned out that System.identityHashCode() is *deadly* slow and
+  scanning linearly an array searching for the desired element is
+  significantly faster.
+
+5.0.5 -> 5.0.6
+
+- Due to erratic and unpredictable behaviour of InputStream.skip(), which
+  does not correspond to its specification and Sun refuses to fix (see bug
+  6222822; don't be fooled by the “closed, fixed” label),
+  FastBufferedInputStream now peeks at the underlying stream and if it is
+  System.in it uses repeated reads. Moreover, it will alternate reads
+  and skips to guarantee that the number of skipped bytes will be
+  smaller than requested only if end of file has been reached.
+
+- The insertion and key retrieval methods of hash-based structures are
+  now protected and final.
+
+- New front() method for indirect queues. It retrieves quickly the indices
+  associated to elements equal to the top.
+
+- First JUnit tests.
+
+5.0.4 -> 5.0.5
+
+- Fixed possible overflow in FastBufferedInputStream.available().
+
+- Indirect heaps have faster checks for elements belonging or not to the
+  queue. In particular, we just rely on array access for detecting indices
+  out of bounds. Profiling with LaMa4J showed that in some circumstances
+  explicitly checking that the indices were within bounds was taking more
+  time than the actual heap inner workings.
+
+- Fixed obnoxious bug dating to the first fastutil implementation. The
+  macro KEY_EQUALS_HASH(x,h,y), which checks for equality between x and y
+  given that the hash of x is h, was evaluating hashCode() on y without
+  guarantee that y was non-null. As a result, adding a null to a map
+  followed by the insertion of an element with hash code 0 would have
+  thrown a NullPointerException. The bug went unobserved for years because
+  no one uses nulls as keys, and was actually detected by a bug in BUbiNG's
+  code (which was in turn mistakenly inserting nulls in a set).
+
+5.0.3 -> 5.0.4
+
+- Fixed missing declaration of generic type for HASH_STRATEGY.
+
+- A new abstract class, MeasurableInputStream, is used for streams
+  whose length and current position are always known. This actually
+  was needed for BUbiNG development.
+
+- New readLine() family of methods for reading "lines" directly
+  from a FastBufferedInputStream.
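+
+  A sketch ("data.txt" is just an example file name; readLine() fills the
+  given byte array and returns the number of bytes read, or -1 at end of
+  file):
+
+      FastBufferedInputStream in =
+          new FastBufferedInputStream(new FileInputStream("data.txt"));
+      byte[] line = new byte[1024];
+      int len;
+      while ((len = in.readLine(line)) != -1) {
+          // the first len bytes of line hold the next line, without the terminator
+      }
+      in.close();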
+
+- In FastBufferedInputStream, reset() has been deprecated in favour
+  of flush().
+
+- Array-based lists of objects now reallocate the backing array
+  using reflection *only* if they were created by wrapping. This
+  won't change the previous behaviour, but at the price of a boolean
+  per list we have unbelievably faster array reallocation.
+
+- New explicit fast load factors in Hash.
+
+5.0.2 -> 5.0.3
+
+- Bizarrely, java.util.List re-specifies iterator(), even though it extends
+  Collection. As a result, we need to re-strengthen it in type-specific lists.
+
+- Fixed new horrible bug introduced by adding Booleans to BinIO and TextIO.
+  Problem is, I didn't know #assert is cumulative.
+
+5.0.1 -> 5.0.2
+
+- Fixed bug in sorted maps key sets and values that would cause a
+  stack overflow when calling size() and a few other methods.
+
+- Fixed lack of booleans in BinIO and TextIO.
+
+- BinIO now checks for too large files.
+
+5.0 -> 5.0.1
+
+- In BinIO, it was assumed that .SIZE would give the size of
+  primitive types in *bytes*. Bad mistake.
+
+4.4.3 -> 5.0
+
+- Java 5 only!
+
+- Support for generics. This led to a number of backward-incompatible changes:
+    * toArray(Object[]) does not accept any longer null as an argument;
+    * singletons for empty collections (sets, lists, etc.) are type-specific;
+    * iterators on sorted collections are bidirectional *by specification*;
+    * the new, covariantly stronger methods defined in all interfaces (e.g.,
+      iterator() returning a type-specific iterator) are now the default,
+      and in the abstract classes the old methods (e.g., objectIterator())
+      now just delegate to the standard method, which is the contrary
+      of what was happening before: you'll have to turn all methods
+      such as objectIterator() into iterator(), etc.
+    * all deprecated methods have been dropped.
+
+- Array growth functions now will return the correct empty array for
+  object arrays (it used to return ObjectArrays.EMPTY_ARRAY).
+
+- Strategies are generic and no longer required to accept REMOVED.
+
+- Stale references could hang around in the nodePath array for
+  Red-Black trees and maps; this has been fixed.
+
+- The difference in semantics with the standard toArray(Object[])
+  specification, which has always been in place, is now exhaustively
+  explained.
+
+- Major code cleanup (mostly code deletion) due to passing fastutil
+  into Eclipse to check unused code, etc.
+
+4.4.2 -> 4.4.3
+
+- Important bug fix in FastBufferedInputStream.
+
+4.4.1 -> 4.4.2
+
+- New reset() method to invalidate the buffer of a FastBufferedInputStream,
+  making it possible to read safely files written by other processes
+  (given, of course, that you are synchronising the accesses).
+
+4.4.0 -> 4.4.1
+
+- New parallel-array constructor for all maps. Very useful for
+  static final map initialisation.
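+
+  For example (a sketch assuming an Int2ObjectOpenHashMap; the keys and
+  values are, of course, made up):
+
+      private static final Int2ObjectOpenHashMap<String> MONTH =
+          new Int2ObjectOpenHashMap<String>(
+              new int[]    { 1,     2,     3     },
+              new String[] { "Jan", "Feb", "Mar" });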
+
+- Following considerations in Jakarta Commons I/O, the standard
+  buffer size has been lowered to 8Ki.
+
+- Some arguments were declared as DataInputStream instead of
+  DataInput.
+
+- New methods for reading/writing objects from/to streams.
+
+4.3.2 -> 4.4
+
+- New static containers for reading and writing easily text and binary
+  data streams. They load/save arrays, iterators etc. to buffered readers
+  or streams.
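+
+  A sketch of the binary container (the file name is just an example; both
+  methods throw IOException):
+
+      int[] a = { 1, 2, 3, 4 };
+      BinIO.storeInts(a, "ints.bin");       // stores the whole array in binary form
+      int[] b = BinIO.loadInts("ints.bin"); // loads it back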
+
+- Moved here fast input/output buffered classes from MG4J. This makes 
+  fastutil self-contained.
+
+- The trivial implementation of the type-specific iterator was missing
+  from AbstractList.drv (surprisingly, not from the subclass implementation!).
+
+- The sublist implementation in AbstractList.drv is now protected and static.
+  The attributes are protected, too.
+
+- Now we compare booleans (false<true). As a result, lists of
+  booleans also get lexicographical comparability.
+
+- add(k) in AbstractList.drv now calls add(size(), k).
+
+- Fixed error messages for out-of-bound indices in lists.
+
+4.3.1 -> 4.3.2
+
+- Fixed small innocuous bug: a code fragment related to non-linked
+  hash tables was generated for linked hash tables, too, due to a
+  case typo in a preprocessor directive. The code fragment, however,
+  had no effect.
+
+- Fixed memory leak in OpenHashMap: the remove() method was not clearing
+  the key (whereas OpenHashSet was). 
+
+4.3 -> 4.3.1
+
+- New fully indirect heap-based double priority queues.
+
+- Fixed docs for queues: in 4.3, we were claiming that greater elements
+  are dequeued first, while the opposite happens.
+
+4.2 -> 4.3
+
+- New full-fledged set of unmodifiable structures *and* iterators.
+
+- Removed about a dozen spurious final method modifiers.
+
+- Made rehash() protected, so that everybody can play with different
+  rehashing strategies.
+
+- trim() in array lists wasn't doing the right thing, because trim(int)
+  wasn't doing it in the first place. Now if n is smaller than the size
+  of the list, we trim at the list size (previously we were doing nothing).
+
+- Analogously, trim() in hash-table-based structures was fixed so that
+  trimming a table below its size will result in rehashing to the minimum
+  possible size.
+
+4.1 -> 4.2
+
+- Improved array methods: now all methods on objects (e.g., grow()) return an
+  array of the same type of the array that was passed to them, similarly to
+  toArray() in collections.
+
+- Fixed missing macro substitution for empty iterator methods. In any
+  case, they were already deprecated.
+
+4.0 -> 4.1
+
+- New classes for custom hashing methods (mainly thought for
+  arrays). Correspondingly, methods for arrays have been implemented in
+  the static containers.
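+
+  A sketch, assuming a set of int arrays compared by content through the
+  strategy exposed by IntArrays:
+
+      ObjectOpenCustomHashSet<int[]> s =
+          new ObjectOpenCustomHashSet<int[]>(IntArrays.HASH_STRATEGY);
+      s.add(new int[] { 1, 2, 3 });
+      boolean found = s.contains(new int[] { 1, 2, 3 }); // true: content-based equality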
+
+- BasicEntry now throws an UnsupportedOperationException on calls to
+  setValue(). If you ever used that method, you got weird results, as
+  it does not update the underlying map. The method is now implemented
+  correctly in open hash maps, which previously did not correctly
+  update the underlying map.
+
+- Reimplemented copy of an entire array using clone().
+
+- Fixed a bug in clear() for indirect heaps (the inversion array was not
+  being cleared).
+
+- Indirect priority queue interfaces now feature an optional allChanged()
+  method that can be used to force a complete heap rebuild. It is implemented by
+  all current array-based and heap-based concrete classes.
+
+3.1 -> 4.0
+
+- IMPORTANT: The optimized methods that a type-specific list must provide now
+  include an addElements() method that quickly adds an array of elements. As
+  usual, the method is fully implemented by the type-specific abstract lists.
+
+- IMPORTANT: The abstract generic version of get(), put() and remove() for maps
+  with non-object keys or values now always return null to denote a missing
+  key. They used to return an object-wrapped default return value.
+
+- Completely new and comprehensive implementation of priority queues, both
+  direct and indirect. Implementations are by heaps and by flat arrays. There
+  are also static containers with all relevant heap methods, for people
+  wanting to do their own thing.
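+
+  For instance, a direct heap-based queue (a sketch assuming
+  IntHeapPriorityQueue):
+
+      IntHeapPriorityQueue q = new IntHeapPriorityQueue();
+      q.enqueue(3);
+      q.enqueue(1);
+      q.enqueue(2);
+      int smallest = q.dequeueInt(); // 1: smaller elements are dequeued first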
+
+- New static containers for comparators.
+
+- All singletons, empty sets and synchronized wrappers are public so
+  you can inherit from them.
+
+- Abstract maps now provide keySet() and values() based on entrySet().
+
+- New abstract classes for sorted sets and maps with delegators to
+  type-specific methods.
+	
+- New public methods in Arrays and in type-specific Arrays classes for checking
+  ranges.
+
+- New static methods for type-specific arrays that allow to grow, enlarge
+  and trim them with ease.
+
+- Clarified abstract implementation of default return values, and implemented
+  clarified specification. Just a couple of methods in hash maps were not
+  already compliant.
+
+- The pour() method now returns a list. The previous version was returning
+  a linked hash set, which was rather nonsensical anyway, since an iterator
+  built on the returned set could have been different from the original iterator.
+  You can always pour an iterator into a set by providing the set explicitly.
+
+- An exception-throwing implementation of some methods in AbstractSet
+  was missing. Same for AbstractCollection, AbstractMap and AbstractList.
+
+- New basic inner entry class for abstract maps, which makes it easier to write
+  entrySet() methods for classes that do not have their own entries.
+
+- Added missing get(Object) method in AbstractMap (just delegates to
+  the type-specific version).
+
+- For lazy people, now containsKey() and containsValue() in AbstractMap
+  are defined by looking into keySet() and values().
+
+- Fixed a few methods of EMPTY_LIST which were throwing exception
+  semantically (see the introduction).
+
+- The interval iterators are now list iterators, except for longs.
+
+- Fixed a bug in size() for array lists (reducing the size of an array
+  would lead to an exception).
+
+- Fixed double bug in hash tables: first of all, on very small sizes adding
+  growthFactor would have left the size unchanged, giving rise to infinite
+  loops. (Thanks to Heikki Uusitalo for reporting this bug.) Second, growthFactor
+  was not being used *at all* by hash maps.
+
+- Fixed entries emitted by singleton maps. Now they are type-specific.
+
+- Fixed a number of minor glitches in gencsource.sh, and added some comments.
+
+- HashCommon.removed has been renamed HashCommon.REMOVED.
+
+- Boolean objects are now generated using valueOf() instead of the constructor.
+
+- New type-specific wrappers for list iterators.
+
+3.0 -> 3.1
+
+- IMPORTANT: it.unimi.dsi.fastutil.Iterators methods have
+  been spread in type-specific static containers.
+
+- New Stack interface, implemented by type-specific lists.
+
+- New static container classes Collections, Sets, and Lists.
+  Presently they just provide empty containers.
+
+- New type-specific static containers (e.g., IntSets) providing
+  singletons and synchronized wrappers.
+
+- Entry sets now have entries that are equal() to entries
+  coming from corresponding maps in java.util.
+
+- Spelling everywhere changed to Pure American. "synchronized" in code and
+  "synchronise" in text side-by-side were looking really wierd...
+
+3.0 -> 3.0.1
+
+- New unwrap() methods for type-specific collections.
+
+- Fixed an old-as-the-world bug, apparently widespread, but one that evidently
+  no one ever noticed: AbstractMap was not serialisable, and, as a result, the
+  default return value was not serialised (I find it sincerely counterintuitive that
+  making a class serialisable doesn't do the same for its supertypes). It
+  wasn't ever even *documented* as preserved, so probably everyone thought
+  this was my idea, too. Too bad this breaks once more serialisation
+  compatibility. Since I had to break some serialisation anyway, I decided
+  to eliminate the residual serialisation of p in hash table classes, too
+  (which breaks serialisation for all hash-based classes).
+
+2.60 -> 3.0
+
+- IMPORTANT: All classes have been repackaged following the type of
+  elements/keys. Sources will have to be retouched (just to change
+  the import clause) and recompiled.
+	
+- IMPORTANT: Because of an unavoidable name clash in the new type-specific list
+  interface, the method remove(int) of IntCollection has been renamed rem(int).
+  The only really unpleasant effect is that you must use rem(int) on variables of
+  type IntCollection that are not of type IntSet (as IntSet reinstates
+  remove(int) in its right place)--for instance, IntList.
+
+- Brand-new implementation of type-specific lists, with all the features you'd
+  expect and more.
+
+- Insertions for readObject() in hash tables are now handled in a special way
+  (20% faster).
+
+- Implemented linear-time tree reconstruction for readObject() (in practice, more
+  than twice as fast).
+
+- Fixed a problem with serialisation of hash tables: the table would have
+  been reloaded with the same p, even if it was preposterous. We still
+  save p, however, to avoid breaking serialisation compatibility.
+
+- Fixed missing implementation of type-specific sets, which should
+  have extended type-specific collections but did not.
+
+- The default return value is now protected.
+
+- New family of pour() methods that pour an iterator into a set.
+
+- New programmable growth factor for hash-table-based classes.
+
+- Eliminated a few useless method calls in tree map.
+
+- Wide range of complex assertions, which are compiled in or out using the
+  "private static final boolean" idiom.
+
+- For references we now use System.identityHashCode(); this shouldn't change
+  much, but it seems definitely more sensible.
+
+- Fixed major bug in subSet()/subMap(): creating a subMap of a tailMap (or
+  headMap) with a right extreme (left, resp.) equal to 0 would have caused the
+  creation of a tailMap (or headMap, resp.), discarding the extreme. Very,
+  very unlikely, but it happened in a test.
+
+- Fixed small bug in standard remove() method of submaps, which would have
+  returned a default return value wrapped in a suitable object instead of
+  null on non-existing keys.
+
+2.52 -> 2.60
+
+- IMPORTANT: Major overhaul of iterators. Now iterators must be skippable,
+  so previous implementations of type-specific iterator interfaces will
+  not work. However, new abstract classes make it easy to build iterators
+  by providing the skipping logic for free, and many useful static methods in
+  Iterators generate type-specific iterators wrapping standard
+  iterators, arrays, etc.
+
+- Better strategy for clear() on hash tables: we don't do anything only
+  if all entries are free (which means that an empty table with deleted
+  entries will be cleared).
+
+2.51 -> 2.52
+
+- IMPORTANT: The package name has changed to it.unimi.dsi.fastutil to be
+  uniform with JPackage conventions. However, this means that you must manually
+  erase the old one and update your sources.
+
+- clear() doesn't do anything on empty hash tables.
+
+2.50 -> 2.51
+- New trim(int) method to reduce a hash table's size while avoiding
+  making it too small.
+
+- serialVersionUID is now fixed, to avoid future incompatibilities.
+
+2.11 -> 2.50
+
+- IMPORTANT: The Collection interface now prescribes an iterator
+  method with a type-specific name (e.g., intIterator()) that
+  returns directly a type-specific iterator.
+
+- New Reference maps and sets that allow storing canonised objects
+  more quickly.
+
+- New linked maps mimicking java.util's, but with a boatload
+  of additional features.
+
+- Small bug fix: the get(Object) method would return null
+  instead of the default return value for maps with object
+  keys.
+
+- Major bug fix: iterating backwards on submaps was leading
+  to unpredictable results.
+
+- Major bug fix: cloning maps would have caused inconsistent behaviour.
+
+- Major code redistribution: now whenever possible wrappers 
+  belong to abstract superclasses.
+
+2.1 -> 2.11
+
+- Now we cache the hash of an object before entering
+  the hash table loop.
+
+2.0 -> 2.1
+
+- A simple optimisation in hash-table inner loops has given
+  quite a performance boost under certain conditions (we do
+  not compute the secondary hashing if it is not necessary). Inspired
+  by Gnu Trove.
+- The trim() method would have in fact trimmed nothing, just
+  rehashed the table.
+- The computed maxFill value was slightly too small.
+- Also tree sets now have constructors from arrays.
+- More internal methods have been made final.
+
+1.3 -> 2.0
+
+- ALL MAPS AND SETS HAVE NEW NAMES DEPENDING ON THE IMPLEMENTATION.
+- Introducing new high-performance, low memory-footprint implementation of
+  SortedMap and SortedSet.
+- Two tree implementations are available: RB trees and
+  AVL trees. Both implementations are threaded. See the README.
+- Fixed a bug in hashCode() and contains() for HashMap.drv (it was
+  considering keys only!).
+- Fixed a bug in contains() for entrySet() in all maps (it was using
+  VALUE_EQUAL to test equality for values given as objects).
+- I realised that a default return value can be useful also for maps and sets
+  returning objects, so now you have it. It is even independent for submaps and
+  subsets.
+- Classes are no longer final. The performance gain is around 1%, and
+  the decrease in usefulness is orders of magnitude greater.
+- We now check equality using first hashCode() and then equals().
+- The tests for speed now warm up the trees by doing repeated insertions
+  and deletions, so that the benefits of a better balancing criterion
+  are more evident.
+- The regression tests are much more stringent.
+- Fixed hashCode() for hash maps (wasn't conforming to the Map interface
+  specification).
+- Implemented linear cloning for tree classes.
diff --git a/LICENSE-2.0 b/LICENSE-2.0
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/LICENSE-2.0
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/README b/README
new file mode 100644
index 0000000..ea9c787
--- /dev/null
+++ b/README
@@ -0,0 +1,44 @@
+Welcome to fastutil, a collection of type-specific Java classes that
+extend the Java Collections Framework by providing several containers,
+such as maps, sets, lists and priority queues, implementing the interfaces
+of the java.util package; it also provides big (64-bit) arrays, sets and
+lists, and fast, practical I/O classes for binary and text files.
+
+fastutil provides a huge collection of specialized classes generated
+starting from a parameterized version; the classes are much more compact
+and much faster than the general ones. Please read the package
+documentation for more information.
+
+With release 6, fastutil becomes available under the Apache License 2.0
+and runs only on Java 6 or newer.
+
+fastutil 6.1.0 has been significantly reorganised. A number of
+not-so-useful classes (double- and sesqui-indirect priority queues) are no
+longer distributed (but you can still generate them from source code). The
+old implementation of hash tables (both sets and maps) has been replaced
+by a linear-probing implementation that is about twice as fast and supports
+true deletions, but does not let you set a growth factor. In any case, you
+can still generate the sources of those classes.
+
+The compiled code is contained in the jar file, and should be installed
+where you keep Java extensions. Note that the jar file is huge, due to the
+large number of classes: if you plan to ship your own jar with some
+fastutil classes included, you should look at AutoJar to automatically
+extract the necessary classes.
+
+You have to run "make sources" to generate the actual Java sources; then
+"ant jar" and "ant javadoc" will build the jar file and the API
+documentation. Note that you need ant (http://jakarta.apache.org/ant/).
+The make target "oldsources" generates the old code for priority queues and
+hash sets/maps, and the ant target "jar-oldsources" includes the necessary
+classes in the jar file.
+
+The Java sources are generated using a C preprocessor. The gencsource.sh
+script reads in a driver file, that is, a Java source that uses some
+preprocessor-defined symbols and some conditional compilation, and
+produces a (fake) C source, which includes the driver code and a set of
+#defines that customize the environment (see the sketch below).
+
+
+
+                                          seba (vigna at acm.org)
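As a minimal sketch of what the driver mechanism described above produces, consider the map driver shipped in this commit (drv/AVLTreeMap.drv). Assuming fastutil's usual naming scheme, the (int, double) specialization of that driver becomes it.unimi.dsi.fastutil.ints.Int2DoubleAVLTreeMap; the demo class below is made up for illustration and is not part of the distribution.

    import it.unimi.dsi.fastutil.ints.Int2DoubleAVLTreeMap;

    public class DrvExpansionDemo {
        public static void main(String[] args) {
            // For this specialization, driver symbols such as KEY_TYPE,
            // VALUE_GENERIC_TYPE and GET_VALUE expand to int, double and get,
            // so keys and values are never boxed.
            Int2DoubleAVLTreeMap m = new Int2DoubleAVLTreeMap();
            m.put(1, 0.5);                  // type-specific put(int, double)
            m.put(2, 1.5);
            System.out.println(m.get(2));   // type-specific get(int); prints 1.5
            // A missing key yields the default return value (0 here), not null.
            System.out.println(m.get(3));
        }
    }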
diff --git a/build.properties b/build.properties
new file mode 100644
index 0000000..bad9ee1
--- /dev/null
+++ b/build.properties
@@ -0,0 +1,20 @@
+jar.base=/usr/share/java
+javadoc.base=/usr/share/javadoc
+
+build.sysclasspath=ignore
+
+version=6.5.4
+
+dist=dist
+src=src
+drv=drv
+test=test
+reports=reports
+coverage=coverage
+checkstyle=checkstyle
+docs=docs
+build=build
+instrumented=instrumented
+
+remote.j2se.apiurl=http://download.oracle.com/javase/6/docs/api/
+local.j2se.apiurl=file://${javadoc.base}/java
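These are plain Ant properties, and command-line definitions take precedence over the property file, so they can be overridden per invocation. The commands below are illustrative, not taken from the upstream documentation:

    ant -Djar.base=/opt/java/lib jar   # look for emma.jar and emma_ant.jar elsewhere
    ant -Dlocal=true javadoc           # link against local.j2se.apiurl (see the <condition> in build.xml below)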
diff --git a/build.xml b/build.xml
new file mode 100644
index 0000000..7c7c346
--- /dev/null
+++ b/build.xml
@@ -0,0 +1,227 @@
+<project name="fastutil" default="jar" basedir="." xmlns:artifact="antlib:org.apache.maven.artifact.ant">
+
+	<property file="build.properties"/>
+
+	<!-- ************************************** WARNING: MAVEN SH*T ************************************** -->
+
+	<!-- define Maven coordinates -->
+	<property name="groupId" value="it.unimi.dsi" />
+	<property name="artifactId" value="fastutil" />
+	<property name="version" value="${version}" />
+
+        <!-- define the artifacts' names, following the Maven convention -->
+	<property name="maven-jar" value="${dist}/lib/${artifactId}-${version}.jar" />
+	<property name="maven-javadoc-jar" value="${dist}/lib/${artifactId}-${version}-javadoc.jar" />
+	<property name="maven-sources-jar" value="${dist}/lib/${artifactId}-${version}-sources.jar" />
+
+        <!-- define the Maven snapshots and staging repository ids and urls -->
+	<property name="maven-snapshots-repository-id" value="sonatype-nexus-snapshots" />
+	<property name="maven-snapshots-repository-url" value="https://oss.sonatype.org/content/repositories/snapshots/" />
+	<property name="maven-staging-repository-id" value="sonatype-nexus-staging" />
+	<property name="maven-staging-repository-url" value="https://oss.sonatype.org/service/local/staging/deploy/maven2/" />
+
+	<target name="dist" depends="compile,javadoc" description="generate the distribution">
+
+		<!-- build the main artifact -->
+		<jar jarfile="${maven-jar}" basedir="${build}" />
+
+		<!-- build the javadoc artifact (from symbolic link created in init) -->
+		<jar jarfile="${maven-javadoc-jar}">
+			<fileset dir="${dist}/javadoc" />
+		</jar>
+
+		<!-- build the sources artifact -->
+		<jar jarfile="${maven-sources-jar}">
+			<fileset dir="." includes="README,CHANGES,LICENSE-2.0,build.xml,build.properties,makefile,${drv}/*.drv,gencsource.sh,${src}/it/unimi/dsi/fastutil/**/*.java,${src}/it/unimi/dsi/fastutil/**/*.html,${test}/**/*.java"/>
+		</jar>
+	</target>
+
+	<target name="deploy" depends="dist" description="deploy snapshot version to Maven snapshot repository">
+		<artifact:mvn>
+			<arg value="org.apache.maven.plugins:maven-deploy-plugin:2.6:deploy-file" />
+			<arg value="-Durl=${maven-snapshots-repository-url}" />
+			<arg value="-DrepositoryId=${maven-snapshots-repository-id}" />
+			<arg value="-DpomFile=pom.xml" />
+			<arg value="-Dfile=${maven-jar}" />
+		</artifact:mvn>
+	</target>
+
+	<target name="stage" depends="dist" description="deploy release version to Maven staging repository">
+		<!-- sign and deploy the main artifact -->
+		<artifact:mvn>
+			<arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
+			<arg value="-Durl=${maven-staging-repository-url}" />
+			<arg value="-DrepositoryId=${maven-staging-repository-id}" />
+			<arg value="-DpomFile=pom.xml" />
+			<arg value="-Dfile=${maven-jar}" />
+                        <arg value="-Pgpg" />
+		</artifact:mvn>
+
+		<!-- sign and deploy the sources artifact -->
+		<artifact:mvn>
+			<arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
+			<arg value="-Durl=${maven-staging-repository-url}" />
+			<arg value="-DrepositoryId=${maven-staging-repository-id}" />
+			<arg value="-DpomFile=pom.xml" />
+			<arg value="-Dfile=${maven-sources-jar}" />
+			<arg value="-Dclassifier=sources" />
+                        <arg value="-Pgpg" />
+		</artifact:mvn>
+
+		<!-- sign and deploy the javadoc artifact -->
+		<artifact:mvn>
+			<arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
+			<arg value="-Durl=${maven-staging-repository-url}" />
+			<arg value="-DrepositoryId=${maven-staging-repository-id}" />
+			<arg value="-DpomFile=pom.xml" />
+			<arg value="-Dfile=${maven-javadoc-jar}" />
+			<arg value="-Dclassifier=javadoc" />
+                        <arg value="-Pgpg" />
+		</artifact:mvn>
+	</target>
+
+	<!-- ************************************** END OF MAVEN SH*T ************************************** -->
+
+	<condition property="j2se.apiurl" value="${local.j2se.apiurl}" else="${remote.j2se.apiurl}"><isset property="local"/></condition>
+
+	<property name="j2se.apiurl" value="http://java.sun.com/j2se/5.0/docs/api/"/>
+
+	<path id="emma.lib" >
+		<pathelement location="${jar.base}/emma.jar" />
+		<pathelement location="${jar.base}/emma_ant.jar" />
+	</path>
+
+	<taskdef resource="emma_ant.properties" classpathref="emma.lib" />
+
+	<target name="init">
+		<mkdir dir="${build}"/>
+		<mkdir dir="${dist}/lib"/>
+		<symlink link="${dist}/javadoc" resource="../${docs}" overwrite="true"/>
+	</target>
+
+	<target name="compile" depends="init">
+		<javac srcdir="${src}" 
+			debug="on"
+			deprecation="on"
+			optimize="on"
+			destdir="${build}"
+			memoryInitialSize="800M"
+			memoryMaximumSize="800M"
+			fork="yes"
+			source="1.6"
+			target="1.6"
+		/>
+	</target>
+
+	<target name="compile-tests" depends="compile">
+		<javac srcdir="${test}" 
+			debug="on"
+			deprecation="on"
+			optimize="on"
+			destdir="${build}"
+			memoryInitialSize="800M"
+			memoryMaximumSize="800M"
+			fork="yes"
+			source="1.6"
+			target="1.6"
+		/>
+	</target>
+
+	<target name="jar" depends="compile">
+		<jar jarfile="fastutil-${version}.jar">
+			<fileset dir="${build}" excludes="it/unimi/dsi/fastutil/*IndirectDoublePriorityQueue*"/>
+	 </jar>
+	</target>
+
+	<target name="jar-tests" depends="compile-tests">
+		<jar jarfile="fastutil-${version}.jar">
+			<fileset dir="${build}" excludes="it/unimi/dsi/fastutil/*IndirectDoublePriorityQueue*"/>
+	 </jar>
+	</target>
+
+	<target name="jar-oldsources" depends="compile">
+		<jar jarfile="fastutil-${version}.jar">
+			<fileset dir="${build}"/>
+	 </jar>
+	</target>
+
+	<target name="javadoc">
+		<mkdir dir="${docs}"/>
+		<javadoc destdir="${docs}" 
+			 packagenames = "it.unimi.dsi.fastutil,it.unimi.dsi.fastutil.*"
+			 overview="${src}/overview.html"
+			 sourcepath="${src}" 
+			 public="on"
+			 source="1.6"
+			 windowtitle="fastutil ${version}"
+			 additionalparam="-breakiterator"
+			 maxmemory="800M"
+			 >
+	<link href="${j2se.apiurl}"/>
+		</javadoc>
+	</target>
+
+	<target name="junit" depends="instrument" description="Runs JUnit tests">
+
+		<junit printsummary="yes" fork="yes" haltonfailure="off"  haltonerror="off">
+			<classpath location="${instrumented}/classes"/>
+			<classpath location="${src}"/>
+			<classpath location="${jar.base}/emma.jar"/>
+			<jvmarg value="-Xmx1G" />
+			<jvmarg value="-Demma.coverage.out.file=${coverage}/coverage.emma" />
+			<jvmarg value="-Demma.coverage.out.merge=true" />
+
+			<formatter type="xml"/>
+			<formatter type="plain"/>
+
+			<batchtest fork="yes" todir="${reports}">
+				<fileset dir="${instrumented}/classes">
+					<include name="**/*Test.class"/>
+				</fileset>
+			</batchtest>
+		</junit>
+
+		<junitreport todir="reports">
+			<fileset dir="reports">
+				<include name="TEST-*.xml"/>
+			</fileset>
+			<report todir="reports/html"/>
+		</junitreport>
+
+		<emma>
+			<report sourcepath="${src}" >
+				<fileset file="${coverage}/*a"/>
+				<html outfile="coverage.html" />
+				<xml outfile="${coverage}/coverage.xml" />
+			</report>
+		</emma>
+	</target>
+
+	<target name="instrument" depends="compile" description="Generate instrumented classes">
+		<emma>
+			<instr mode="fullcopy"
+				 outdir="${instrumented}"
+				 merge="no"
+				 metadatafile="${coverage}/metadata.emma"
+				 instrpath="${build}"
+			>
+				<filter excludes="*Test*"/>
+			</instr>
+		</emma>
+	</target>
+
+
+	<!-- ************		CLEAN		********************* -->
+	<target name="clean">
+		<delete dir="${build}"/>
+		<delete dir="${dist}"/>
+		<delete dir="${reports}"/>
+		<delete dir="${coverage}"/>
+		<delete dir="${instrumented}"/>
+		<delete dir="${docs}"/>
+		<delete>
+			<fileset dir="." includes="fastutil-*.jar"/>
+		</delete>
+  </target>
+
+</project>
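For reference, a typical build sequence against the targets above might look as follows. This is illustrative only: the "make sources" step comes from the README and the upstream makefile rather than from this build.xml, and "ant junit" additionally requires emma.jar and emma_ant.jar under ${jar.base}.

    make sources     # expand the drv/ drivers into type-specific .java sources
    ant jar          # compile and package fastutil-6.5.4.jar
    ant javadoc      # build the API documentation into docs/
    ant junit        # EMMA-instrument the classes and run the JUnit tests
    ant clean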
diff --git a/debian/changelog b/debian/changelog
deleted file mode 100644
index 40f526a..0000000
--- a/debian/changelog
+++ /dev/null
@@ -1,5 +0,0 @@
-libfastutil-java (6.5.4-1) unstable; urgency=low
-
-  * Initial release (Closes: #711212)
-
- -- Andreas Tille <tille at debian.org>  Wed, 05 Jun 2013 14:51:51 +0200
diff --git a/debian/compat b/debian/compat
deleted file mode 100644
index ec63514..0000000
--- a/debian/compat
+++ /dev/null
@@ -1 +0,0 @@
-9
diff --git a/debian/control b/debian/control
deleted file mode 100644
index 666e14b..0000000
--- a/debian/control
+++ /dev/null
@@ -1,57 +0,0 @@
-Source: libfastutil-java
-Maintainer: Debian Java Maintainers <pkg-java-maintainers at lists.alioth.debian.org>
-Uploaders: Andreas Tille <tille at debian.org>
-Section: java
-Priority: optional
-Build-Depends: debhelper (>= 9),
-               javahelper,
-               default-jdk,
-               ant
-Standards-Version: 3.9.4
-Vcs-Svn: svn://anonscm.debian.org/pkg-java/trunk/libfastutil-java
-Vcs-Browser: http://anonscm.debian.org/viewvc/pkg-java/trunk/libfastutil-java
-Homepage: http://fastutil.di.unimi.it/
-
-Package: libfastutil-java
-Architecture: all
-Depends: ${misc:Depends},
-         ${java:Depends}
-Recommends: ${java:Recommends}
-Description: Java API providing type-specific maps, sets, lists and queues
- Fastutil extends the Java™ Collections Framework by providing
- type-specific maps, sets, lists and queues with a small memory footprint
- and fast access and insertion; provides also big (64-bit) arrays, sets
- and lists, and fast, practical I/O classes for binary and text files.
- .
- The classes implement their standard counterpart interface (e.g., Map
- for maps) and can be plugged into existing code. Moreover, they provide
- additional features (such as bidirectional iterators) that are not
- available in the standard classes.
- .
- Besides objects and primitive types, fastutil classes provide support
- for references, that is, objects that are compared using the equality
- operator rather than the equals() method. 
-
-Package: libfastutil-java-doc
-Architecture: all
-Section: doc
-Depends: ${misc:Depends},
-         ${java:Depends}
-Recommends: ${java:Recommends}
-Description: API documentation for libfastutil-java
- Fastutil extends the Java™ Collections Framework by providing
- type-specific maps, sets, lists and queues with a small memory footprint
- and fast access and insertion; provides also big (64-bit) arrays, sets
- and lists, and fast, practical I/O classes for binary and text files.
- .
- The classes implement their standard counterpart interface (e.g., Map
- for maps) and can be plugged into existing code. Moreover, they provide
- additional features (such as bidirectional iterators) that are not
- available in the standard classes.
- .
- Besides objects and primitive types, fastutil classes provide support
- for references, that is, objects that are compared using the equality
- operator rather than the equals() method. 
- .
- This package contains the javadoc documentation files.
-
diff --git a/debian/copyright b/debian/copyright
deleted file mode 100644
index e28a427..0000000
--- a/debian/copyright
+++ /dev/null
@@ -1,44 +0,0 @@
-Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
-Upstream-Contact: Sebastiano Vigna <vigna at acm.org>
-Source: http://fastutil.di.unimi.it/
-
-Files: *
-Copyright: © 2003-2013 Sebastiano Vigna <vigna at acm.org>, Paolo Boldi
-License: Apache-2.0
-
-Files: src/it/unimi/dsi/fastutil/io/*.java test/it/unimi/dsi/fastutil/io/InspectableFileCachedInputStreamTest.java
-Copyright: © 2003-2011 Sebastiano Vigna <vigna at acm.org>, Paolo Boldi
-License: LGPLv2.1+
- This library is free software; you can redistribute it and/or modify it
- under the terms of the GNU Lesser General Public License as published by the Free
- Software Foundation; either version 2.1 of the License, or (at your option)
- any later version.
- .
- This library is distributed in the hope that it will be useful, but
- WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
- or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
- for more details.
- .
- On Debian systems you can find a copy of GNU Lesser General Public License
- at /usr/share/common-licenses/LGPL-2.1
-
-Files: debian/*
-Copyright: © 2013 Andreas Tille <tille at debian.org>
-License: Apache-2.0 
-
-License: Apache-2.0 
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
- .
-      http://www.apache.org/licenses/LICENSE-2.0
- .
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License. 
- .
- On Debian systems you can find a copy of Apache 2.0 at
- /usr/share/common-licenses/Apache-2.0
-
diff --git a/debian/libfastutil-java-doc.javadoc b/debian/libfastutil-java-doc.javadoc
deleted file mode 100644
index 7f94ad4..0000000
--- a/debian/libfastutil-java-doc.javadoc
+++ /dev/null
@@ -1 +0,0 @@
-docs /usr/share/doc/libfastutil-java
diff --git a/debian/libfastutil-java.jlibs b/debian/libfastutil-java.jlibs
deleted file mode 100644
index 5541373..0000000
--- a/debian/libfastutil-java.jlibs
+++ /dev/null
@@ -1 +0,0 @@
-fastutil*.jar
diff --git a/debian/rules b/debian/rules
deleted file mode 100755
index 88bcb40..0000000
--- a/debian/rules
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/make -f
-
-# DH_VERBOSE := 1
-
-%:
-	dh $@ --with javahelper
-
-override_dh_auto_clean:
-	dh_auto_clean
-	rm -f fastutil-*.jar
-	rm -rf build dist docs
-	find src/it/unimi/dsi/fastutil -name "*.[ch]" -delete
-	# delete auto-generated *.java files.
-        # Attention: This
-	# 	find src/it/unimi/dsi/fastutil -mindepth 2 -name "*.java" -delete
-	# does not work because files in /io need to remain
-	find \
-		src/it/unimi/dsi/fastutil/booleans \
-		src/it/unimi/dsi/fastutil/bytes \
-		src/it/unimi/dsi/fastutil/chars \
-		src/it/unimi/dsi/fastutil/doubles \
-		src/it/unimi/dsi/fastutil/floats \
-		src/it/unimi/dsi/fastutil/ints \
-		src/it/unimi/dsi/fastutil/longs \
-		src/it/unimi/dsi/fastutil/objects \
-		src/it/unimi/dsi/fastutil/shorts \
-		-name "*.java" -delete
-	rm -f src/it/unimi/dsi/fastutil/io/BinIO.java src/it/unimi/dsi/fastutil/io/TextIO.java
-
-override_dh_auto_build:
-	make sources
-	# make sources TEST=1		# will compile behavioral and speed tests into the classes
-	# make sources ASSERTS=1	# will compile assertions into the classes
-	ant jar
-	ant javadoc
-
-override_dh_builddeb:
-	dh_builddeb -- -Z xz
-
-get-orig-source:
-	mkdir -p ../tarballs
-	uscan --verbose --force-download --destdir ../tarballs
diff --git a/debian/source/format b/debian/source/format
deleted file mode 100644
index 163aaf8..0000000
--- a/debian/source/format
+++ /dev/null
@@ -1 +0,0 @@
-3.0 (quilt)
diff --git a/debian/watch b/debian/watch
deleted file mode 100644
index b7302bd..0000000
--- a/debian/watch
+++ /dev/null
@@ -1,2 +0,0 @@
-version=3
-http://fastutil.di.unimi.it/fastutil-([.\d]+)-src\.tar\.gz
diff --git a/drv/AVLTreeMap.drv b/drv/AVLTreeMap.drv
new file mode 100644
index 0000000..37c7da4
--- /dev/null
+++ b/drv/AVLTreeMap.drv
@@ -0,0 +1,2746 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.objects.AbstractObjectSortedSet;
+import it.unimi.dsi.fastutil.objects.ObjectBidirectionalIterator;
+import it.unimi.dsi.fastutil.objects.ObjectListIterator;
+import it.unimi.dsi.fastutil.objects.ObjectSortedSet;
+
+import VALUE_PACKAGE.VALUE_COLLECTION;
+import VALUE_PACKAGE.VALUE_ABSTRACT_COLLECTION;
+import VALUE_PACKAGE.VALUE_ITERATOR;
+
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.NoSuchElementException;
+
+#if #values(primitive)
+import VALUE_PACKAGE.VALUE_LIST_ITERATOR;
+#endif
+
+/** A type-specific AVL tree map with a fast, small-footprint implementation.
+ *
+ * <P>The iterators provided by the views of this class are type-specific {@linkplain
+ * it.unimi.dsi.fastutil.BidirectionalIterator bidirectional iterators}.
+ * Moreover, the iterators returned by the views' <code>iterator()</code> methods can be safely cast
+ * to type-specific {@linkplain java.util.ListIterator list iterators}.
+ */
+
+public class AVL_TREE_MAP KEY_VALUE_GENERIC extends ABSTRACT_SORTED_MAP  KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable {
+
+	/** A reference to the root entry. */
+	protected transient Entry KEY_VALUE_GENERIC tree;
+
+	/** Number of entries in this map. */
+	protected int count;
+
+	/** The first key in this map. */
+	protected transient Entry KEY_VALUE_GENERIC firstEntry;
+
+	/** The last key in this map. */
+	protected transient Entry KEY_VALUE_GENERIC lastEntry;
+
+	/** Cached set of entries. */
+	protected transient volatile ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> entries;
+
+	/** Cached set of keys. */
+	protected transient volatile SORTED_SET KEY_GENERIC keys;
+
+	/** Cached collection of values. */
+	protected transient volatile VALUE_COLLECTION VALUE_GENERIC values;
+
+	/** This variable records whether the <em>domain</em> of the map
+	 * has been modified by the last <code>put()</code> or
+	 * <code>remove()</code>. */
+	protected transient boolean modified;
+
+	/** This map's comparator, as provided in the constructor. */
+	protected Comparator<? super KEY_GENERIC_CLASS> storedComparator;
+
+	/** This map's actual comparator; it may differ from {@link #storedComparator} because it is
+		always a type-specific comparator, so it could be derived from the former by wrapping. */
+	protected transient KEY_COMPARATOR KEY_SUPER_GENERIC actualComparator;
+
+	private static final long serialVersionUID = -7046029254386353129L;
+
+	private static final boolean ASSERTS = ASSERTS_VALUE;
+
+	{
+		allocatePaths();
+	}
+
+	/** Creates a new empty tree map. 
+	 */
+
+	public AVL_TREE_MAP() {
+		tree = null;
+		count = 0;
+	}
+
+	/** Generates the comparator that will actually be used.
+	 *
+	 * <P>When a specific {@link Comparator} is specified and stored in {@link
+	 * #storedComparator}, we must check whether it is type-specific.  If it is
+	 * so, we can use it directly, and we store it in {@link #actualComparator}. Otherwise,
+	 * we generate on-the-fly an anonymous class that wraps the non-specific {@link Comparator}
+	 * and makes it into a type-specific one.
+	 */
+	@SuppressWarnings("unchecked")
+	private void setActualComparator() {
+#if #keyclass(Object)
+		actualComparator = storedComparator;
+#else
+		/* If the provided comparator is already type-specific, we use it. Otherwise,
+		   we use a wrapper anonymous class to fake that it is type-specific. */
+		if ( storedComparator == null || storedComparator instanceof KEY_COMPARATOR ) actualComparator = (KEY_COMPARATOR)storedComparator;
+		else actualComparator =	new KEY_COMPARATOR KEY_SUPER_GENERIC() {
+				public int compare( KEY_GENERIC_TYPE k1, KEY_GENERIC_TYPE k2 ) {
+					return storedComparator.compare( KEY2OBJ( k1 ), KEY2OBJ( k2 ) );
+				}
+				public int compare( KEY_GENERIC_CLASS ok1, KEY_GENERIC_CLASS ok2 ) {
+					return storedComparator.compare( ok1, ok2 );
+				}
+			};
+#endif
+	}
+	 
+
+	/** Creates a new empty tree map with the given comparator.
+	 *
+	 * @param c a (possibly type-specific) comparator.
+	 */
+
+	public AVL_TREE_MAP( final Comparator<? super KEY_GENERIC_CLASS> c ) {
+		this();
+		storedComparator = c;
+		setActualComparator();
+	}
+
+
+	/** Creates a new tree map copying a given map.
+	 *
+	 * @param m a {@link Map} to be copied into the new tree map. 
+	 */
+	 
+	public AVL_TREE_MAP( final Map<? extends KEY_GENERIC_CLASS, ? extends VALUE_GENERIC_CLASS> m ) {
+		this();
+		putAll( m );
+	}
+
+	/** Creates a new tree map copying a given sorted map (and its {@link Comparator}).
+	 *
+	 * @param m a {@link SortedMap} to be copied into the new tree map. 
+	 */
+	 
+	public AVL_TREE_MAP( final SortedMap<KEY_GENERIC_CLASS,VALUE_GENERIC_CLASS> m ) {
+		this( m.comparator() );
+		putAll( m );
+	}
+
+	/** Creates a new tree map copying a given map.
+	 *
+	 * @param m a type-specific map to be copied into the new tree map. 
+	 */
+	 
+	public AVL_TREE_MAP( final MAP KEY_VALUE_EXTENDS_GENERIC m ) {
+		this();
+		putAll( m );
+	}
+
+	/** Creates a new tree map copying a given sorted map (and its {@link Comparator}).
+	 *
+	 * @param m a type-specific sorted map to be copied into the new tree map. 
+	 */
+	 
+	public AVL_TREE_MAP( final SORTED_MAP KEY_VALUE_GENERIC m ) {
+		this( m.comparator() );
+		putAll( m );
+	}
+
+	/** Creates a new tree map using the elements of two parallel arrays and the given comparator.
+	 *
+	 * @param k the array of keys of the new tree map.
+	 * @param v the array of corresponding values in the new tree map.
+	 * @param c a (possibly type-specific) comparator.
+	 * @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths.
+	 */
+	 
+	public AVL_TREE_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE v[], final Comparator<? super KEY_GENERIC_CLASS> c ) {
+		this( c );
+		if ( k.length != v.length ) throw new IllegalArgumentException( "The key array and the value array have different lengths (" + k.length + " and " + v.length + ")" );
+		for( int i = 0; i < k.length; i++ ) this.put( k[ i ], v[ i ] );
+	}
+
+	/** Creates a new tree map using the elements of two parallel arrays.
+	 *
+	 * @param k the array of keys of the new tree map.
+	 * @param v the array of corresponding values in the new tree map.
+	 * @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths.
+	 */
+	 
+	public AVL_TREE_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE v[] ) {
+		this( k, v, null );
+	}
+
+	/*
+	 * The following methods implements some basic building blocks used by
+	 * all accessors.  They are (and should be maintained) identical to those used in AVLTreeSet.drv.
+	 *
+	 * The put()/remove() code is derived from Ben Pfaff's GNU libavl
+	 * (http://www.msu.edu/~pfaffben/avl/). If you want to understand what's
+	 * going on, you should have a look at the literate code contained therein
+	 * first.  
+	 */
+
+
+	/** Compares two keys in the right way. 
+	 *
+	 * <P>This method uses the {@link #actualComparator} if it is non-<code>null</code>.
+	 * Otherwise, it resorts to primitive type comparisons or to {@link Comparable#compareTo(Object) compareTo()}.
+	 *
+	 * @param k1 the first key.
+	 * @param k2 the second key.
+	 * @return a number smaller than, equal to or greater than 0, as usual
+	 * (i.e., when k1 < k2, k1 = k2 or k1 > k2, respectively).
+	 */
+	 
+	@SuppressWarnings("unchecked")
+	final int compare( final KEY_GENERIC_TYPE k1, final KEY_GENERIC_TYPE k2 ) {
+		return actualComparator == null ? KEY_CMP( k1, k2 ) : actualComparator.compare( k1, k2 );
+	}
+
+
+
+	/** Returns the entry corresponding to the given key, if it is in the tree; <code>null</code>, otherwise.
+	 *
+	 * @param k the key to search for.
+	 * @return the corresponding entry, or <code>null</code> if no entry with the given key exists.
+	 */
+
+	final Entry KEY_VALUE_GENERIC findKey( final KEY_GENERIC_TYPE k ) {
+		Entry KEY_VALUE_GENERIC e = tree;
+		int cmp;
+		 
+		while ( e != null && ( cmp = compare( k, e.key ) ) != 0 ) e = cmp < 0 ? e.left() : e.right();
+
+		return e;
+	}
+
+	/** Locates a key.
+	 *
+	 * @param k a key.
+	 * @return the last entry on a search for the given key; this will be
+	 * the given key, if it is present; otherwise, it will be either the smallest greater key or the greatest smaller key.
+	 */
+
+	final Entry KEY_VALUE_GENERIC locateKey( final KEY_GENERIC_TYPE k ) {
+		Entry KEY_VALUE_GENERIC e = tree, last = tree;
+		int cmp = 0;
+		  
+		while ( e != null && ( cmp = compare( k, e.key ) ) != 0 ) {
+			last = e;
+			e = cmp < 0 ? e.left() : e.right();
+		}
+		  
+		return cmp == 0 ? e : last;
+	}
+
+	/** This vector remembers the directions followed during 
+	 * the current insertion. It suffices for about 2<sup>32</sup> entries. */
+	private transient boolean dirPath[];
+
+	private void allocatePaths() {
+		dirPath = new boolean[ 48 ];
+	}
+
+
+	/* After execution of this method, modified is true iff a new entry has
+	been inserted. */
+	 
+	public VALUE_GENERIC_TYPE put( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) {
+		modified = false;
+
+		if ( tree == null ) { // The case of the empty tree is treated separately.
+			count++;
+			tree = lastEntry = firstEntry = new Entry KEY_VALUE_GENERIC( k, v );
+			modified = true;
+		}
+		else {
+			Entry KEY_VALUE_GENERIC p = tree, q = null, y = tree, z = null, e = null, w = null;
+			int cmp, i = 0;
+
+			while( true ) {
+				if ( ( cmp = compare( k, p.key ) ) == 0 ) {
+					final VALUE_GENERIC_TYPE oldValue = p.value;
+					p.value = v;
+					return oldValue;
+				}
+					 
+				if ( p.balance() != 0 ) {
+					i = 0;
+					z = q;
+					y = p;
+				}
+					 
+				if ( dirPath[ i++ ] = cmp > 0 ) {
+					if ( p.succ() ) {
+						count++;
+						e = new Entry KEY_VALUE_GENERIC( k, v );
+								
+						modified = true; 
+						if ( p.right == null ) lastEntry = e;
+								
+						e.left = p;
+						e.right = p.right;
+								
+						p.right( e );
+								
+						break;
+					}
+
+					q = p;
+					p = p.right;
+				}
+				else {
+					if ( p.pred() ) {
+						count++;
+						e = new Entry KEY_VALUE_GENERIC( k, v );
+								
+						modified = true;
+						if ( p.left == null ) firstEntry = e;
+								
+						e.right = p;
+						e.left = p.left;
+								
+						p.left( e );
+
+						break;
+					}
+
+					q = p;
+					p = p.left;
+				}
+			}
+
+			p = y;
+			i = 0;
+
+			while( p != e ) {
+				if ( dirPath[ i ] ) p.incBalance();
+				else p.decBalance();
+
+				p = dirPath[ i++ ] ? p.right : p.left;
+			}
+				
+			if ( y.balance() == -2 ) {
+				Entry KEY_VALUE_GENERIC x = y.left;
+
+				if ( x.balance() == -1 ) {
+					w = x;
+					if ( x.succ() ) {
+						x.succ( false );
+						y.pred( x );
+					}
+					else y.left = x.right;
+
+					x.right = y;
+					x.balance( 0 );
+					y.balance( 0 );
+				}
+				else {
+					if ( ASSERTS ) assert x.balance() == 1;
+
+					w = x.right;
+					x.right = w.left;
+					w.left = x;
+					y.left = w.right;
+					w.right = y;
+					if ( w.balance() == -1 ) {
+						x.balance( 0 );
+						y.balance( 1 );
+					}
+					else if ( w.balance() == 0 ) {
+						x.balance( 0 );
+						y.balance( 0 );
+					}
+					else {
+						x.balance( -1 );
+						y.balance( 0 );
+					}
+					w.balance( 0 );
+
+
+					if ( w.pred() ) {
+						x.succ( w );
+						w.pred( false );
+					}
+					if ( w.succ() ) {
+						y.pred( w );
+						w.succ( false );
+					}
+
+				}
+			}
+			else if ( y.balance() == +2 ) {
+				Entry KEY_VALUE_GENERIC x = y.right;
+
+				if ( x.balance() == 1 ) {
+					w = x;
+					if ( x.pred() ) {
+						x.pred( false );
+						y.succ( x );
+					}
+					else y.right = x.left;
+
+					x.left = y;
+					x.balance( 0 );
+					y.balance( 0 );
+				}
+				else {
+					if ( ASSERTS ) assert x.balance() == -1;
+
+					w = x.left;
+					x.left = w.right;
+					w.right = x;
+					y.right = w.left;
+					w.left = y;
+					if ( w.balance() == 1 ) {
+						x.balance( 0 );
+						y.balance( -1 );
+					}
+					else if ( w.balance() == 0 ) {
+						x.balance( 0 );
+						y.balance( 0 );
+					}
+					else {
+						x.balance( 1 );
+						y.balance( 0 );
+					}
+					w.balance( 0 );
+
+
+					if ( w.pred() ) {
+						y.succ( w );
+						w.pred( false );
+					}
+					if ( w.succ() ) {
+						x.pred( w );
+						w.succ( false );
+					}
+
+				}
+			}
+			else return defRetValue;
+
+			if ( z == null ) tree = w;
+			else {
+				if ( z.left == y ) z.left = w;
+				else z.right = w;
+			}
+		}
+
+		if ( ASSERTS ) checkTree( tree );
+		return defRetValue;
+	}
+
+	/** Finds the parent of an entry.
+	 *
+	 * @param e a node of the tree.
+	 * @return the parent of the given node, or <code>null</code> for the root.
+	 */
+
+	private Entry KEY_VALUE_GENERIC parent( final Entry KEY_VALUE_GENERIC e ) {
+		if ( e == tree ) return null;
+
+		Entry KEY_VALUE_GENERIC x, y, p;
+		x = y = e;
+
+		while( true ) {
+			if ( y.succ() ) {
+				p = y.right;
+				if ( p == null || p.left != e ) {
+					while( ! x.pred() ) x = x.left;
+					p = x.left;
+				}
+				return p;
+			}
+			else if ( x.pred() ) {
+				p = x.left;
+				if ( p == null || p.right != e ) {
+					while( ! y.succ() ) y = y.right;
+					p = y.right;
+				}
+				return p;
+			}
+
+			x = x.left;
+			y = y.right;
+		}
+	}
+
+	 
+	/* After execution of this method, {@link #modified} is true iff an entry
+	has been deleted. */
+
+	@SuppressWarnings("unchecked")
+	public VALUE_GENERIC_TYPE REMOVE_VALUE( final KEY_TYPE k ) {
+		modified = false;
+
+		if ( tree == null ) return defRetValue;
+
+		int cmp;
+		Entry KEY_VALUE_GENERIC p = tree, q = null;
+		boolean dir = false;
+		final KEY_GENERIC_TYPE kk = KEY_GENERIC_CAST k;
+
+		while( true ) {
+			if ( ( cmp = compare( kk, p.key ) ) == 0 ) break;
+			else if ( dir = cmp > 0 ) {
+				q = p;
+				if ( ( p = p.right() ) == null ) return defRetValue;
+			}
+			else {
+				q = p;
+				if ( ( p = p.left() ) == null ) return defRetValue;
+			}
+		}
+
+		if ( p.left == null ) firstEntry = p.next();
+		if ( p.right == null ) lastEntry = p.prev();
+
+		if ( p.succ() ) {
+			if ( p.pred() ) {
+				if ( q != null ) {
+					if ( dir ) q.succ( p.right );
+					else q.pred( p.left );
+				}
+				else tree = dir ? p.right : p.left;
+			}
+			else {
+				p.prev().right = p.right;
+
+				if ( q != null ) {
+					if ( dir ) q.right = p.left;
+					else q.left = p.left;
+				}
+				else tree = p.left;
+			}
+		}
+		else {
+			Entry KEY_VALUE_GENERIC r = p.right;
+
+			if ( r.pred() ) {
+				r.left = p.left;
+				r.pred( p.pred() );
+				if ( ! r.pred() ) r.prev().right = r;
+				if ( q != null ) {
+					if ( dir ) q.right = r;
+					else q.left = r;
+				}
+				else tree = r;
+
+				r.balance( p.balance() );
+				q = r;
+				dir = true;
+
+			}
+			else {
+				Entry KEY_VALUE_GENERIC s;
+
+				while( true ) {
+					s = r.left;
+					if ( s.pred() ) break;
+					r = s;
+				}
+
+				if ( s.succ() ) r.pred( s );
+				else r.left = s.right;
+
+				s.left = p.left;
+
+				if ( ! p.pred() ) {
+					p.prev().right = s;
+					s.pred( false );
+				}
+
+				s.right = p.right;
+				s.succ( false );
+								
+				if ( q != null ) {
+					if ( dir ) q.right = s;
+					else q.left = s;
+				}
+				else tree = s;
+
+				s.balance( p.balance() );
+				q = r;
+				dir = false;
+			}
+		}
+
+		Entry KEY_VALUE_GENERIC y;
+
+		while( q != null ) {
+			y = q;
+			q = parent( y );
+
+			if ( ! dir ) {
+				dir = q != null && q.left != y;
+				y.incBalance();
+
+				if ( y.balance() == 1 ) break;
+				else if ( y.balance() == 2 ) {
+
+					Entry KEY_VALUE_GENERIC x = y.right;
+					if ( ASSERTS ) assert x != null;
+									 
+					if ( x.balance() == -1 ) {
+						Entry KEY_VALUE_GENERIC w;
+								
+						if ( ASSERTS ) assert x.balance() == -1;
+
+						w = x.left;
+						x.left = w.right;
+						w.right = x;
+						y.right = w.left;
+						w.left = y;
+
+						if ( w.balance() == 1 ) {
+							x.balance( 0 );
+							y.balance( -1 );
+						}
+						else if ( w.balance() == 0 ) {
+							x.balance( 0 );
+							y.balance( 0 );
+						}
+						else {
+							if ( ASSERTS ) assert w.balance() == -1;
+
+							x.balance( 1 );
+							y.balance( 0 );
+						}
+
+						w.balance( 0 );
+
+						if ( w.pred() ) {
+							y.succ( w );
+							w.pred( false );
+						}
+						if ( w.succ() ) {
+							x.pred( w );
+							w.succ( false );
+						}
+
+						if ( q != null ) {
+							if ( dir ) q.right = w;
+							else q.left = w;
+						}
+						else tree = w;
+					}
+					else {
+						if ( q != null ) {
+							if ( dir ) q.right = x;
+							else q.left = x;
+						}
+						else tree = x;
+										  
+						if ( x.balance() == 0 ) {
+							y.right = x.left;
+							x.left = y;
+							x.balance( -1 );
+							y.balance( +1 );
+							break;
+						}
+						if ( ASSERTS ) assert x.balance() == 1;
+
+						if ( x.pred() ) {
+							y.succ( true );
+							x.pred( false );
+						}
+						else y.right = x.left;
+
+						x.left = y;
+						y.balance( 0 );
+						x.balance( 0 );
+					}
+				}
+			}
+			else {
+				dir = q != null && q.left != y;
+				y.decBalance();
+
+				if ( y.balance() == -1 ) break;
+				else if ( y.balance() == -2 ) {
+
+					Entry KEY_VALUE_GENERIC x = y.left;
+					if ( ASSERTS ) assert x != null;
+									 
+					if ( x.balance() == 1 ) {
+						Entry KEY_VALUE_GENERIC w;
+								
+						if ( ASSERTS ) assert x.balance() == 1;
+
+						w = x.right;
+						x.right = w.left;
+						w.left = x;
+						y.left = w.right;
+						w.right = y;
+
+						if ( w.balance() == -1 ) {
+							x.balance( 0 );
+							y.balance( 1 );
+						}
+						else if ( w.balance() == 0 ) {
+							x.balance( 0 );
+							y.balance( 0 );
+						}
+						else {
+							if ( ASSERTS ) assert w.balance() == 1;
+
+							x.balance( -1 );
+							y.balance( 0 );
+						}
+
+						w.balance( 0 );
+
+						if ( w.pred() ) {
+							x.succ( w );
+							w.pred( false );
+						}
+						if ( w.succ() ) {
+							y.pred( w );
+							w.succ( false );
+						}
+
+						if ( q != null ) {
+							if ( dir ) q.right = w;
+							else q.left = w;
+						}
+						else tree = w;
+					}
+					else {
+						if ( q != null ) {
+							if ( dir ) q.right = x;
+							else q.left = x;
+						}
+						else tree = x;
+										  
+						if ( x.balance() == 0 ) {
+							y.left = x.right;
+							x.right = y;
+							x.balance( +1 );
+							y.balance( -1 );
+							break;
+						}
+						if ( ASSERTS ) assert x.balance() == -1;
+
+						if ( x.succ() ) {
+							y.pred( true );
+							x.succ( false );
+						}
+						else y.left = x.right;
+
+						x.right = y;
+						y.balance( 0 );
+						x.balance( 0 );
+					}
+				}  
+			}
+		}
+
+		modified = true;
+		count--;
+		if ( ASSERTS ) checkTree( tree );
+		return p.value;
+	}
+	 
+
+#if ! #keyclass(Object) || #values(primitive)
+	public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS ok, final VALUE_GENERIC_CLASS ov ) {
+		final VALUE_GENERIC_TYPE oldValue = put( KEY_CLASS2TYPE(ok), VALUE_CLASS2TYPE(ov) );
+		return modified ? OBJECT_DEFAULT_RETURN_VALUE : VALUE2OBJ( oldValue );
+	}
+#endif
+
+
+
+#if ! #keyclass(Object) || #values(primitive)
+	public VALUE_GENERIC_CLASS remove( final Object ok ) {
+		final VALUE_GENERIC_TYPE oldValue = REMOVE_VALUE( KEY_OBJ2TYPE( ok ) );
+		return modified ? VALUE2OBJ( oldValue ) : OBJECT_DEFAULT_RETURN_VALUE;
+	}
+#endif
+
+
+	public boolean containsValue( final VALUE_TYPE v ) {
+		final ValueIterator i = new ValueIterator();
+		VALUE_GENERIC_TYPE ev;
+		  
+		int j = count;
+		while( j-- != 0 ) {
+			ev = i.NEXT_VALUE();
+			if ( VALUE_EQUALS( ev, v ) ) return true;
+		}
+		  
+		return false;
+	}
+
+	public void clear() {
+		count = 0;
+		tree = null;
+		entries = null;
+		values = null;
+		keys = null;
+		firstEntry = lastEntry = null;
+	}
+
+	 
+	/** This class represents an entry in a tree map.
+	 *
+	 * <P>The only "metadata" we use, i.e., {@link Entry#info}, stores
+	 * information about balance, predecessor status and successor status.
+	 *
+	 * <P>Note that since the class is recursive, it can be
+	 * considered equivalently a tree.
+	 */
+
+	private static final class Entry KEY_VALUE_GENERIC implements Cloneable, MAP.Entry KEY_VALUE_GENERIC {
+		/** If the bit in this mask is true, {@link #right} points to a successor. */
+		private final static int SUCC_MASK = 1 << 31;
+		/** If the bit in this mask is true, {@link #left} points to a predecessor. */
+		private final static int PRED_MASK = 1 << 30;
+		/** The bits in this mask hold the node balance info. You can get it just by casting to byte. */
+		private final static int BALANCE_MASK = 0xFF;
+		/** The key of this entry. */
+		KEY_GENERIC_TYPE key;
+		/** The value of this entry. */
+		VALUE_GENERIC_TYPE value;
+		/** The pointers to the left and right subtrees. */
+		Entry KEY_VALUE_GENERIC left, right;
+		/** This integer holds different information in different bits (see {@link #SUCC_MASK}, {@link #PRED_MASK} and {@link #BALANCE_MASK}). */
+		int info;
+
+		Entry() {}
+
+		/** Creates a new entry with the given key and value.
+		 *
+		 * @param k a key.
+		 * @param v a value.
+		 */
+		Entry( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) {
+			this.key = k;
+			this.value = v;
+			info = SUCC_MASK | PRED_MASK;
+		}
+		  
+		/** Returns the left subtree. 
+		 *
+		 * @return the left subtree (<code>null</code> if the left
+		 * subtree is empty).
+		 */
+		Entry KEY_VALUE_GENERIC left() {
+			return ( info & PRED_MASK ) != 0 ? null : left;
+		}
+		  
+		/** Returns the right subtree. 
+		 *
+		 * @return the right subtree (<code>null</code> if the right
+		 * subtree is empty).
+		 */
+		Entry KEY_VALUE_GENERIC right() {
+			return ( info & SUCC_MASK ) != 0 ? null : right;
+		}
+		  
+		/** Checks whether the left pointer is really a predecessor.
+		 * @return true if the left pointer is a predecessor.
+		 */
+		boolean pred() {
+			return ( info & PRED_MASK ) != 0;
+		}
+		  
+		/** Checks whether the right pointer is really a successor.
+		 * @return true if the right pointer is a successor.
+		 */
+		boolean succ() {
+			return ( info & SUCC_MASK ) != 0;
+		}
+		  
+		/** Sets whether the left pointer is really a predecessor.
+		 * @param pred if true then the left pointer will be considered a predecessor.
+		 */
+		void pred( final boolean pred ) {
+			if ( pred ) info |= PRED_MASK;
+			else info &= ~PRED_MASK;
+		}
+		  
+		/** Sets whether the right pointer is really a successor.
+		 * @param succ if true then the right pointer will be considered a successor.
+		 */
+		void succ( final boolean succ ) {
+			if ( succ ) info |= SUCC_MASK;
+			else info &= ~SUCC_MASK;
+		}
+		  
+		/** Sets the left pointer to a predecessor.
+		 * @param pred the predecessor.
+		 */
+		void pred( final Entry KEY_VALUE_GENERIC pred ) {
+			info |= PRED_MASK;
+			left = pred;
+		}
+		  
+		/** Sets the right pointer to a successor.
+		 * @param succ the successor.
+		 */
+		void succ( final Entry KEY_VALUE_GENERIC succ ) {
+			info |= SUCC_MASK;
+			right = succ;
+		}
+		  
+		/** Sets the left pointer to the given subtree.
+		 * @param left the new left subtree.
+		 */
+		void left( final Entry KEY_VALUE_GENERIC left ) {
+			info &= ~PRED_MASK;
+			this.left = left;
+		}
+		  
+		/** Sets the right pointer to the given subtree.
+		 * @param right the new right subtree.
+		 */
+		void right( final Entry KEY_VALUE_GENERIC right ) {
+			info &= ~SUCC_MASK;
+			this.right = right;
+		}
+		  
+		/** Returns the current balance factor of the node.
+		 * @return the current balance factor of this node.
+		 */
+		int balance() {
+			return (byte)info;
+		}
+
+		/** Sets the balance factor of this node.
+		 * @param level the new balance factor of this node.
+		 */
+		void balance( int level ) {
+			info &= ~BALANCE_MASK;
+			info |= ( level & BALANCE_MASK );
+		}
+
+		/** Increments the balance factor of this node. */
+		void incBalance() {
+			info = info & ~BALANCE_MASK | ( (byte)info + 1 ) & 0xFF;
+		}
+		  
+		/** Decrements the balance factor of this node. */
+		protected	void decBalance() {
+			info = info & ~BALANCE_MASK | ( (byte)info - 1 ) & 0xFF;
+		}
+
+		/** Computes the next entry in the set order.
+		 *
+		 * @return the next entry (<code>null</code> if this is the last entry).
+		 */
+
+		Entry KEY_VALUE_GENERIC next() {
+			Entry KEY_VALUE_GENERIC next = this.right;
+			if ( ( info & SUCC_MASK ) == 0 ) while ( ( next.info & PRED_MASK ) == 0 ) next = next.left;
+			return next;
+		}
+
+		/** Computes the previous entry in the set order.
+		 *
+		 * @return the previous entry (<code>null</code> if this is the first entry).
+		 */
+
+		Entry KEY_VALUE_GENERIC prev() {
+			Entry KEY_VALUE_GENERIC prev = this.left;
+			if ( ( info & PRED_MASK ) == 0 ) while ( ( prev.info & SUCC_MASK ) == 0 ) prev = prev.right;
+			return prev;
+		}
+
+		public KEY_GENERIC_CLASS getKey() {
+			return KEY2OBJ(key);
+		}
+		  
+#if ! #keyclass(Object)
+		public KEY_GENERIC_TYPE ENTRY_GET_KEY() {
+			return key;
+		}
+#endif
+		  
+		public VALUE_GENERIC_CLASS getValue() {
+			return VALUE2OBJ(value);
+		}
+		  
+#if #values(primitive)
+		public VALUE_TYPE ENTRY_GET_VALUE() {
+			return value;
+		}
+#endif
+		  
+		public VALUE_GENERIC_TYPE setValue(final VALUE_GENERIC_TYPE value) {
+			final VALUE_GENERIC_TYPE oldValue = this.value;
+			this.value = value;
+			return oldValue;
+		}
+		  
+#if #values(primitive)
+		  
+		public VALUE_GENERIC_CLASS setValue(final VALUE_GENERIC_CLASS value) {
+			return VALUE2OBJ(setValue(VALUE_CLASS2TYPE(value)));
+		}
+		  
+#endif
+
+		@SuppressWarnings("unchecked")
+		public Entry KEY_VALUE_GENERIC clone() {
+			Entry KEY_VALUE_GENERIC c;
+			try {
+				c = (Entry KEY_VALUE_GENERIC)super.clone();
+			}
+			catch(CloneNotSupportedException cantHappen) {
+				throw new InternalError();
+			}
+
+			c.key = key;
+			c.value = value;
+			c.info = info;
+
+			return c;
+		}
+
+		@SuppressWarnings("unchecked")
+		public boolean equals( final Object o ) {
+			if (!(o instanceof Map.Entry)) return false;
+			Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> e = (Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>)o;
+				
+			return KEY_EQUALS( key, KEY_CLASS2TYPE( e.getKey() ) ) && VALUE_EQUALS( value, VALUE_CLASS2TYPE( e.getValue() ) );
+		}
+		  
+		public int hashCode() {
+			return KEY2JAVAHASH(key) ^ VALUE2JAVAHASH(value);
+		}
+		  
+		  
+		public String toString() {
+			return key + "=>" + value;
+		}
+		  
+
+		/*
+		public void prettyPrint() {
+			prettyPrint(0);
+		}
+
+		public void prettyPrint(int level) {
+			if ( pred() ) {
+				for (int i = 0; i < level; i++)
+					System.err.print("  ");
+				System.err.println("pred: " + left );
+			}
+			else if (left != null)
+				left.prettyPrint(level +1 );
+			for (int i = 0; i < level; i++)
+				System.err.print("  ");
+			System.err.println(key + "=" + value + " (" + balance() + ")");
+			if ( succ() ) {
+				for (int i = 0; i < level; i++)
+					System.err.print("  ");
+				System.err.println("succ: " + right );
+			}
+			else if (right != null)
+				right.prettyPrint(level + 1);
+		}
+		*/
+	}
+	 
+	/*
+	public void prettyPrint() {
+		System.err.println("size: " + count);
+		if (tree != null) tree.prettyPrint();
+	}
+	*/
+
+	@SuppressWarnings("unchecked")
+	public boolean containsKey( final KEY_TYPE k ) {
+		return findKey( KEY_GENERIC_CAST k ) != null;
+	}
+	 
+	public int size() {
+		return count;
+	}
+	 
+	public boolean isEmpty() {
+		return count == 0;
+	}
+	 
+	 
+	@SuppressWarnings("unchecked")
+	public VALUE_GENERIC_TYPE GET_VALUE( final KEY_TYPE k ) {
+		final Entry KEY_VALUE_GENERIC e = findKey( KEY_GENERIC_CAST k );
+		return e == null ? defRetValue : e.value;
+	}
+
+#if #keyclass(Object) && #values(primitive)
+
+	@SuppressWarnings("unchecked")
+	public VALUE_GENERIC_CLASS get( final Object ok ) {
+		final Entry KEY_VALUE_GENERIC e = findKey( KEY_GENERIC_CAST ok );
+		return e == null ? OBJECT_DEFAULT_RETURN_VALUE : e.getValue();
+	}
+
+#endif
+
+	public KEY_GENERIC_TYPE FIRST_KEY() {
+		if ( tree == null ) throw new NoSuchElementException();
+		return firstEntry.key;
+	}
+
+	public KEY_GENERIC_TYPE LAST_KEY() {
+		if ( tree == null ) throw new NoSuchElementException();
+		return lastEntry.key;
+	}
+
+
+	/** An abstract iterator on the whole range.
+	 *
+	 * <P>This class can iterate in both directions on a threaded tree.
+	 */
+
+	private class TreeIterator {
+		/** The entry that will be returned by the next call to {@link java.util.ListIterator#previous()} (or <code>null</code> if no previous entry exists). */
+		Entry KEY_VALUE_GENERIC prev;
+		/** The entry that will be returned by the next call to {@link java.util.ListIterator#next()} (or <code>null</code> if no next entry exists). */
+		Entry KEY_VALUE_GENERIC next;
+		/** The last entry that was returned (or <code>null</code> if we did not iterate or used {@link #remove()}). */
+		Entry KEY_VALUE_GENERIC curr;
+		/** The current index (in the sense of a {@link java.util.ListIterator}). Note that this value is not meaningful when this {@link TreeIterator} has been created using the nonempty constructor.*/
+		int index = 0;
+		  
+		TreeIterator() {
+			next = firstEntry;
+		}
+
+		TreeIterator( final KEY_GENERIC_TYPE k ) {
+			if ( ( next = locateKey( k ) ) != null ) {
+				if ( compare( next.key, k ) <= 0 ) {
+					prev = next;
+					next = next.next();
+				}
+				else prev = next.prev();
+			}
+		}
+
+		public boolean hasNext() { return next != null; }
+		public boolean hasPrevious() { return prev != null; }
+
+		void updateNext() {
+			next = next.next();
+		}
+
+		Entry KEY_VALUE_GENERIC nextEntry() {
+			if ( ! hasNext() ) throw new NoSuchElementException();
+			curr = prev = next;
+			index++;
+			updateNext();
+			return curr;
+		}
+
+		void updatePrevious() {
+			prev = prev.prev();
+		}
+
+		Entry KEY_VALUE_GENERIC previousEntry() {
+			if ( ! hasPrevious() ) throw new NoSuchElementException();
+			curr = next = prev;
+			index--;
+			updatePrevious();
+			return curr;
+		}
+
+		public int nextIndex() {
+			return index;
+		}
+
+		public int previousIndex() {
+			return index - 1;
+		}
+
+		public void remove() {
+			if ( curr == null ) throw new IllegalStateException();
+			/* If the last operation was a next(), we are removing an entry that precedes
+			   the current index, and thus we must decrement it. */
+			if ( curr == prev ) index--;
+			next = prev = curr;
+			updatePrevious();
+			updateNext();
+			AVL_TREE_MAP.this.REMOVE_VALUE( curr.key );
+			curr = null;
+		}
+
+		public int skip( final int n ) { 
+			int i = n;
+			while( i-- != 0 && hasNext() ) nextEntry(); 
+			return n - i - 1;
+		}
+
+		public int back( final int n ) { 
+			int i = n;
+			while( i-- != 0 && hasPrevious() ) previousEntry(); 
+			return n - i - 1;
+		}
+	}
+
+
+	/** An iterator on the whole range.
+	 *
+	 * <P>This class can iterate in both directions on a threaded tree.
+	 */
+
+	private class EntryIterator extends TreeIterator implements ObjectListIterator<MAP.Entry KEY_VALUE_GENERIC> {
+		EntryIterator() {}
+
+		EntryIterator( final KEY_GENERIC_TYPE k ) {
+			super( k );
+		}
+
+		public MAP.Entry KEY_VALUE_GENERIC next() { return nextEntry(); }
+		public MAP.Entry KEY_VALUE_GENERIC previous() { return previousEntry(); }
+
+		public void set( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); }
+		public void add( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); }
+	}
+
+
+	public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> ENTRYSET() {
+		if ( entries == null ) entries = new AbstractObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC>() {
+				final Comparator<? super MAP.Entry KEY_VALUE_GENERIC> comparator = new Comparator<MAP.Entry KEY_VALUE_GENERIC> () {
+					public int compare( final MAP.Entry KEY_VALUE_GENERIC x, final MAP.Entry KEY_VALUE_GENERIC y ) {
+						return AVL_TREE_MAP.this.storedComparator.compare( x.getKey(), y.getKey() );
+					}
+				};
+
+				public Comparator<? super MAP.Entry KEY_VALUE_GENERIC> comparator() {
+					return comparator;
+				}
+
+				public ObjectBidirectionalIterator<MAP.Entry KEY_VALUE_GENERIC> iterator() {
+					return new EntryIterator();
+				}
+
+				public ObjectBidirectionalIterator<MAP.Entry KEY_VALUE_GENERIC> iterator( final MAP.Entry KEY_VALUE_GENERIC from ) {
+					return new EntryIterator( KEY_CLASS2TYPE( from.getKey() ) );
+				}
+
+				@SuppressWarnings("unchecked")
+				public boolean contains( final Object o ) {
+					if (!(o instanceof Map.Entry)) return false;
+					final Map.Entry <KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> e = (Map.Entry <KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>)o;
+					final Entry KEY_VALUE_GENERIC f = findKey( KEY_CLASS2TYPE( e.getKey() ) );
+					return e.equals( f );
+				}					 
+
+				@SuppressWarnings("unchecked")
+				public boolean remove( final Object o ) {
+					if (!(o instanceof Map.Entry)) return false;
+					final Map.Entry <KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> e = (Map.Entry <KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>)o;
+					final Entry KEY_VALUE_GENERIC f = findKey( KEY_CLASS2TYPE( e.getKey() ) );
+					if ( f != null ) AVL_TREE_MAP.this.REMOVE_VALUE( f.key );
+					return f != null;
+				}
+
+				public int size() { return count; }
+				public void clear() { AVL_TREE_MAP.this.clear(); }
+					 
+				public MAP.Entry KEY_VALUE_GENERIC first() { return firstEntry; }
+				public MAP.Entry KEY_VALUE_GENERIC last() { return lastEntry; }
+				public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> subSet( MAP.Entry KEY_VALUE_GENERIC from, MAP.Entry KEY_VALUE_GENERIC to  ) { return subMap( from.getKey(), to.getKey() ).ENTRYSET(); }
+				public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> headSet( MAP.Entry KEY_VALUE_GENERIC to  ) { return headMap( to.getKey() ).ENTRYSET(); }
+				public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> tailSet( MAP.Entry KEY_VALUE_GENERIC from  ) { return tailMap( from.getKey() ).ENTRYSET(); }
+			};
+
+		return entries;
+	}
+
+	/** An iterator on the whole range of keys.
+	 *
+	 * <P>This class can iterate in both directions on the keys of a threaded tree. We 
+	 * simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods (and possibly
+	 * their type-specific counterparts) so that they return keys instead of entries.
+	 */
+	private final class KeyIterator extends TreeIterator implements KEY_LIST_ITERATOR KEY_GENERIC {
+		public KeyIterator() {}
+		public KeyIterator( final KEY_GENERIC_TYPE k ) { super( k ); }
+		public KEY_GENERIC_TYPE NEXT_KEY() { return nextEntry().key; }
+		public KEY_GENERIC_TYPE PREV_KEY() { return previousEntry().key; }
+
+		public void set( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+		public void add( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+
+#if !#keyclass(Object)
+		public KEY_GENERIC_CLASS next() { return KEY2OBJ( nextEntry().key ); }
+		public KEY_GENERIC_CLASS previous() { return KEY2OBJ( previousEntry().key ); }
+		public void set( KEY_CLASS ok ) { throw new UnsupportedOperationException(); }
+		public void add( KEY_CLASS ok ) { throw new UnsupportedOperationException(); }
+#endif						
+	};
+
+	/** A keyset implementation using a more direct implementation for iterators. */
+	private class KeySet extends ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC.KeySet {
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return new KeyIterator(); }
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return new KeyIterator( from ); }
+	}
+
+	/** Returns a type-specific sorted set view of the keys contained in this map.
+	 *
+	 * <P>In addition to the semantics of {@link java.util.Map#keySet()}, you can
+	 * safely cast the set returned by this call to a type-specific sorted
+	 * set interface.
+	 *
+	 * @return a type-specific sorted set view of the keys contained in this map.
+	 */
+	public SORTED_SET KEY_GENERIC keySet() {
+		if ( keys == null ) keys = new KeySet();
+		return keys;
+	}
+
+	/** An iterator on the whole range of values.
+	 *
+	 * <P>This class can iterate in both directions on the values of a threaded tree. We 
+	 * simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods (and possibly
+	 * their type-specific counterparts) so that they return values instead of entries.
+	 */
+	private final class ValueIterator extends TreeIterator implements VALUE_LIST_ITERATOR VALUE_GENERIC {
+		public VALUE_GENERIC_TYPE NEXT_VALUE() { return nextEntry().value; }
+		public VALUE_GENERIC_TYPE PREV_VALUE() { return previousEntry().value; }
+		public void set( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); }
+		public void add( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); }
+
+#if #values(primitive)
+		public VALUE_GENERIC_CLASS next() { return VALUE2OBJ( nextEntry().value ); }
+		public VALUE_GENERIC_CLASS previous() { return VALUE2OBJ( previousEntry().value ); }
+		public void set( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); }
+		public void add( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); }
+#endif									
+	};
+
+	/** Returns a type-specific collection view of the values contained in this map.
+	 *
+	 * <P>In addition to the semantics of {@link java.util.Map#values()}, you can
+	 * safely cast the collection returned by this call to a type-specific collection
+	 * interface.
+	 *
+	 * @return a type-specific collection view of the values contained in this map.
+	 */
+
+	public VALUE_COLLECTION VALUE_GENERIC values() {
+		if ( values == null ) values = new VALUE_ABSTRACT_COLLECTION VALUE_GENERIC() {
+				public VALUE_ITERATOR VALUE_GENERIC iterator() {
+					return new ValueIterator();
+				}
+
+				public boolean contains( final VALUE_TYPE k ) {
+					return containsValue( k );
+				} 
+
+				public int size() {
+					return count;
+				}
+					 
+				public void clear() {
+					AVL_TREE_MAP.this.clear();
+				}
+					 
+			};
+
+		return values;
+	}
+
+	public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() {
+		return actualComparator;
+	}
+
+	public SORTED_MAP KEY_VALUE_GENERIC headMap( KEY_GENERIC_TYPE to ) {
+		return new Submap( KEY_NULL, true, to, false );
+	}
+
+	public SORTED_MAP KEY_VALUE_GENERIC tailMap( KEY_GENERIC_TYPE from ) {
+		return new Submap( from, false, KEY_NULL, true );
+	}
+
+	public SORTED_MAP KEY_VALUE_GENERIC subMap( KEY_GENERIC_TYPE from,  KEY_GENERIC_TYPE to ) {
+		return new Submap( from, false, to, false );
+	}
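+	/* Range-view sketch (same assumed int/int instantiation): the maps returned above are
+	 * live views backed by this tree, which is why their size must be recomputed on the fly:
+	 *
+	 *   Int2IntSortedMap head = m.headMap( 3 );   // keys strictly smaller than 3
+	 *   int before = head.size();
+	 *   m.put( 0, 0 );                            // inserted into the backing map
+	 *   int after = head.size();                  // before + 1: the view sees the new key
+	 */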
+
+	/** A submap with given range.
+	 *
+	 * <P>This class represents a submap. One has to specify the left/right
+	 * limits (which can be set to -∞ or ∞). Since the submap is a
+	 * view on the map, at a given moment the limits of the range might no
+	 * longer be in the main map. Thus, things such as
+	 * {@link java.util.SortedMap#firstKey()} or {@link java.util.Map#size()} must always be computed
+	 * on the fly.
+	 */
+	private final class Submap extends ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC implements java.io.Serializable {
+    	private static final long serialVersionUID = -7046029254386353129L;
+
+		/** The start of the submap range, unless {@link #bottom} is true. */
+		KEY_GENERIC_TYPE from;
+		/** The end of the submap range, unless {@link #top} is true. */
+		KEY_GENERIC_TYPE to;
+		/** If true, the submap range starts from -∞. */
+		boolean bottom;
+		/** If true, the submap range goes to ∞. */
+		boolean top;
+		/** Cached set of entries. */
+		@SuppressWarnings("hiding")
+		protected transient volatile ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> entries;
+		/** Cached set of keys. */
+		@SuppressWarnings("hiding")
+		protected transient volatile SORTED_SET KEY_GENERIC keys;
+		/** Cached collection of values. */
+		@SuppressWarnings("hiding")
+		protected transient volatile VALUE_COLLECTION VALUE_GENERIC values;
+
+		/** Creates a new submap with given key range.
+		 *
+		 * @param from the start of the submap range.
+		 * @param bottom if true, the first parameter is ignored and the range starts from -∞.
+		 * @param to the end of the submap range.
+		 * @param top if true, the third parameter is ignored and the range goes to ∞.
+		 */
+		public Submap( final KEY_GENERIC_TYPE from, final boolean bottom, final KEY_GENERIC_TYPE to, final boolean top ) {
+			if ( ! bottom && ! top && AVL_TREE_MAP.this.compare( from, to ) > 0 ) throw new IllegalArgumentException( "Start key (" + from  + ") is larger than end key (" + to + ")" );
+
+			this.from = from;
+			this.bottom = bottom;
+			this.to = to;
+			this.top = top;
+			this.defRetValue = AVL_TREE_MAP.this.defRetValue;
+		}
+
+		public void clear() {
+			final SubmapIterator i = new SubmapIterator();
+			while( i.hasNext() ) {
+				i.nextEntry();
+				i.remove();
+			}
+		}
+
+		/** Checks whether a key is in the submap range.
+		 * @param k a key.
+		 * @return true if the key is in the submap range.
+		 */
+		final boolean in( final KEY_GENERIC_TYPE k ) {
+			return ( bottom || AVL_TREE_MAP.this.compare( k, from ) >= 0 ) &&
+				( top || AVL_TREE_MAP.this.compare( k, to ) < 0 );
+		}
+
+		public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> ENTRYSET() {
+			if ( entries == null ) entries = new AbstractObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC>() {
+					public ObjectBidirectionalIterator<MAP.Entry KEY_VALUE_GENERIC> iterator() {
+						return new SubmapEntryIterator();
+					}
+						  
+					public ObjectBidirectionalIterator<MAP.Entry KEY_VALUE_GENERIC> iterator( final MAP.Entry KEY_VALUE_GENERIC from ) {
+						return new SubmapEntryIterator( KEY_CLASS2TYPE( from.getKey() ) );
+					}
+
+					public Comparator<? super MAP.Entry KEY_VALUE_GENERIC> comparator() { return AVL_TREE_MAP.this.entrySet().comparator(); }
+
+					@SuppressWarnings("unchecked")
+					public boolean contains( final Object o ) {
+						if (!(o instanceof Map.Entry)) return false;
+						final Map.Entry <KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> e = (Map.Entry <KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>)o;
+						final AVL_TREE_MAP.Entry KEY_VALUE_GENERIC f = findKey( KEY_CLASS2TYPE( e.getKey() ) );
+						return f != null && in( f.key ) && e.equals( f );
+					}					 
+
+					@SuppressWarnings("unchecked")
+					public boolean remove( final Object o ) {
+						if (!(o instanceof Map.Entry)) return false;
+						final Map.Entry <KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> e = (Map.Entry <KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>)o;
+						final AVL_TREE_MAP.Entry KEY_VALUE_GENERIC f = findKey( KEY_CLASS2TYPE( e.getKey() ) );
+						if ( f != null && in( f.key ) ) Submap.this.REMOVE_VALUE( f.key );
+						return f != null;
+					}
+
+					public int size() {
+						int c = 0;
+						for( Iterator<?> i = iterator(); i.hasNext(); i.next() ) c++;
+						return c;
+					}
+					 
+
+					public boolean isEmpty() {
+						return ! new SubmapIterator().hasNext();
+					}
+
+					public void clear() {
+						Submap.this.clear();
+					}
+					 
+					public MAP.Entry KEY_VALUE_GENERIC first() { return firstEntry(); }
+					public MAP.Entry KEY_VALUE_GENERIC last() { return lastEntry(); }
+					public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> subSet( MAP.Entry KEY_VALUE_GENERIC from, MAP.Entry KEY_VALUE_GENERIC to  ) { return subMap( from.getKey(), to.getKey() ).ENTRYSET(); }
+					public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> headSet( MAP.Entry KEY_VALUE_GENERIC to  ) { return headMap( to.getKey() ).ENTRYSET(); }
+					public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> tailSet( MAP.Entry KEY_VALUE_GENERIC from  ) { return tailMap( from.getKey() ).ENTRYSET(); }
+				};
+
+			return entries;
+		}
+
+		private class KeySet extends ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC.KeySet {
+			public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return new SubmapKeyIterator();	}
+			public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return new SubmapKeyIterator( from ); }
+		}
+
+		public SORTED_SET KEY_GENERIC keySet() {
+			if ( keys == null ) keys = new KeySet();
+			return keys;
+		}
+		  
+		public VALUE_COLLECTION VALUE_GENERIC values() {
+			if ( values == null ) values = new VALUE_ABSTRACT_COLLECTION VALUE_GENERIC() {
+					public VALUE_ITERATOR VALUE_GENERIC iterator() {
+						return new SubmapValueIterator();
+					}
+
+					public boolean contains( final VALUE_TYPE k ) {
+						return containsValue( k );
+					} 
+
+					public int size() {
+						return Submap.this.size();
+					}
+						  
+					public void clear() {
+						Submap.this.clear();
+					}
+						  
+				};
+				
+			return values;
+		}
+		  
+		@SuppressWarnings("unchecked")
+		public boolean containsKey( final KEY_TYPE k ) {
+			return in( KEY_GENERIC_CAST k ) && AVL_TREE_MAP.this.containsKey( k );
+		}
+
+		public boolean containsValue( final VALUE_TYPE v ) {
+			final SubmapIterator i = new SubmapIterator();
+			VALUE_TYPE ev;
+				
+			while( i.hasNext() ) {
+				ev = i.nextEntry().value;
+				if ( VALUE_EQUALS( ev, v ) ) return true;
+			}
+				
+			return false;
+		}
+		  
+
+		@SuppressWarnings("unchecked")
+		public VALUE_GENERIC_TYPE GET_VALUE(final KEY_TYPE k) {
+			final AVL_TREE_MAP.Entry KEY_VALUE_GENERIC e;
+			final KEY_GENERIC_TYPE kk = KEY_GENERIC_CAST k;
+			return in( kk ) && ( e = findKey( kk ) ) != null ? e.value : this.defRetValue;
+		}
+		  
+		  
+#if #keyclass(Object) && #values(primitive)
+
+		@SuppressWarnings("unchecked")
+		public VALUE_GENERIC_CLASS get( final Object ok ) {
+			final AVL_TREE_MAP.Entry KEY_VALUE_GENERIC e;
+			final KEY_GENERIC_TYPE kk = KEY_GENERIC_CAST KEY_OBJ2TYPE( ok );
+			return in( kk ) && ( e = findKey( kk ) ) != null ? e.getValue() : OBJECT_DEFAULT_RETURN_VALUE;
+		}
+#endif
+		  
+		public VALUE_GENERIC_TYPE put(final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v) {
+			modified = false;
+			if ( ! in( k ) ) throw new IllegalArgumentException( "Key (" + k + ") out of range [" + ( bottom ? "-" : String.valueOf( from ) ) + ", " + ( top ? "-" : String.valueOf( to ) ) + ")" ); 
+			final VALUE_GENERIC_TYPE oldValue = AVL_TREE_MAP.this.put( k, v );
+			return modified ? this.defRetValue : oldValue;
+		}
+
+		  
+#if ! #keyclass(Object) || #values(primitive)
+		public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS ok, final VALUE_GENERIC_CLASS ov ) {
+			final VALUE_GENERIC_TYPE oldValue = put( KEY_CLASS2TYPE(ok), VALUE_CLASS2TYPE(ov) );
+			return modified ? OBJECT_DEFAULT_RETURN_VALUE : VALUE2OBJ( oldValue );
+		}
+#endif
+
+		@SuppressWarnings("unchecked")
+		public VALUE_GENERIC_TYPE REMOVE_VALUE( final KEY_TYPE k ) {
+			modified = false;
+			if ( ! in( KEY_GENERIC_CAST k ) ) return this.defRetValue;
+			final VALUE_GENERIC_TYPE oldValue = AVL_TREE_MAP.this.REMOVE_VALUE( k );
+			return modified ? oldValue : this.defRetValue;
+		}
+
+#if ! #keyclass(Object) || #values(primitive)
+		public VALUE_GENERIC_CLASS remove( final Object ok ) {
+			final VALUE_GENERIC_TYPE oldValue = REMOVE_VALUE( KEY_OBJ2TYPE( ok ) );
+			return modified ? VALUE2OBJ( oldValue ) : OBJECT_DEFAULT_RETURN_VALUE;
+		}
+#endif
+
+		public int size() {
+			final SubmapIterator i = new SubmapIterator();
+			int n = 0;
+				
+			while( i.hasNext() ) {
+				n++;
+				i.nextEntry();
+			}
+				
+			return n;
+		}
+
+
+		public boolean isEmpty() {
+			return ! new SubmapIterator().hasNext();
+		}
+		  
+		public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() {
+			return actualComparator;
+		}
+		  
+		public SORTED_MAP KEY_VALUE_GENERIC headMap( final KEY_GENERIC_TYPE to ) {
+			if ( top ) return new Submap( from, bottom, to, false );
+			return compare( to, this.to ) < 0 ? new Submap( from, bottom, to, false ) : this;
+		}
+		  
+		public SORTED_MAP KEY_VALUE_GENERIC tailMap( final KEY_GENERIC_TYPE from ) {
+			if ( bottom ) return new Submap( from, false, to, top );
+			return compare( from, this.from ) > 0 ? new Submap( from, false, to, top ) : this;
+		}
+		  
+		public SORTED_MAP KEY_VALUE_GENERIC subMap( KEY_GENERIC_TYPE from,  KEY_GENERIC_TYPE to ) {
+			if ( top && bottom ) return new Submap( from, false, to, false );
+			if ( ! top ) to = compare( to, this.to ) < 0 ? to : this.to;
+			if ( ! bottom ) from = compare( from, this.from ) > 0 ? from : this.from;
+	 		if ( ! top && ! bottom && from == this.from && to == this.to ) return this;
+			return new Submap( from, false, to, false );
+		}
+
+		/** Locates the first entry.
+		 *
+		 * @return the first entry of this submap, or <code>null</code> if the submap is empty.
+		 */
+		public AVL_TREE_MAP.Entry KEY_VALUE_GENERIC firstEntry() {
+			if ( tree == null ) return null;
+			// If this submap goes to -infinity, we return the main map's first entry; otherwise, we locate the start of the map.
+			AVL_TREE_MAP.Entry KEY_VALUE_GENERIC e;
+			if ( bottom ) e = firstEntry;
+			else {
+				e = locateKey( from );
+				// If we find either the start or something greater we're OK.
+				if ( compare( e.key, from ) < 0 ) e = e.next();
+			}
+			// Finally, if this submap doesn't go to infinity, we check that the resulting key isn't greater than the end.
+			if ( e == null || ! top && compare( e.key, to ) >= 0 ) return null;
+			return e;
+		}
+	 
+		/** Locates the last entry.
+		 *
+		 * @return the last entry of this submap, or <code>null</code> if the submap is empty.
+		 */
+		public AVL_TREE_MAP.Entry KEY_VALUE_GENERIC lastEntry() {
+			if ( tree == null ) return null;
+			// If this submap goes to infinity, we return the main map's last entry; otherwise, we locate the end of the map.
+			AVL_TREE_MAP.Entry KEY_VALUE_GENERIC e;
+			if ( top ) e = lastEntry;
+			else {
+				e = locateKey( to );
+				// If we find something smaller than the end we're OK.
+				if ( compare( e.key, to ) >= 0 ) e = e.prev();
+			}
+			// Finally, if this submap doesn't go to -infinity, we check that the resulting key isn't smaller than the start.
+			if ( e == null || ! bottom && compare( e.key, from ) < 0 ) return null;
+			return e;
+		}
+
+		public KEY_GENERIC_TYPE FIRST_KEY() {
+			AVL_TREE_MAP.Entry KEY_VALUE_GENERIC e = firstEntry();
+			if ( e == null ) throw new NoSuchElementException();
+			return e.key;
+		}
+		public KEY_GENERIC_TYPE LAST_KEY() {
+			AVL_TREE_MAP.Entry KEY_VALUE_GENERIC e = lastEntry();
+			if ( e == null ) throw new NoSuchElementException();
+			return e.key;
+		}
+	 
+#if !#keyclass(Object)
+		public KEY_GENERIC_CLASS firstKey() {
+			AVL_TREE_MAP.Entry KEY_VALUE_GENERIC e = firstEntry();
+			if ( e == null ) throw new NoSuchElementException();
+			return e.getKey();
+		}
+	 
+		public KEY_GENERIC_CLASS lastKey() {
+			AVL_TREE_MAP.Entry KEY_VALUE_GENERIC e = lastEntry();
+			if ( e == null ) throw new NoSuchElementException();
+			return e.getKey();
+		}
+#endif
+
+		/** An iterator for subranges.
+		 * 
+		 * <P>This class inherits from {@link TreeIterator}, but overrides the methods that
+		 * update the pointer after a {@link java.util.ListIterator#next()} or {@link java.util.ListIterator#previous()}. If the move
+		 * would take us out of the range of the submap, we simply overwrite the next or previous
+		 * entry with <code>null</code>.
+		 */
+		private class SubmapIterator extends TreeIterator {
+			SubmapIterator() {
+				next = firstEntry();
+			}
+
+			SubmapIterator( final KEY_GENERIC_TYPE k ) {
+				this();
+					 
+				if ( next != null ) {
+					if ( ! bottom && compare( k, next.key ) < 0 ) prev = null;
+					else if ( ! top && compare( k, ( prev = lastEntry() ).key ) >= 0 ) next = null;
+					else {
+						next = locateKey( k );
+								
+						if ( compare( next.key, k ) <= 0 ) {
+							prev = next;
+							next = next.next();
+						}
+						else prev = next.prev();
+					}
+				}			
+			}
+
+			void updatePrevious() {
+				prev = prev.prev();
+				if ( ! bottom && prev != null && AVL_TREE_MAP.this.compare( prev.key, from ) < 0 ) prev = null;
+			}
+				
+			void updateNext() {
+				next = next.next();
+				if ( ! top && next != null && AVL_TREE_MAP.this.compare( next.key, to ) >= 0 ) next = null;
+			}
+		}
+
+		private class SubmapEntryIterator extends SubmapIterator implements ObjectListIterator<MAP.Entry KEY_VALUE_GENERIC>  {
+			SubmapEntryIterator() {}
+
+			SubmapEntryIterator( final KEY_GENERIC_TYPE k ) {
+				super( k );
+			}
+
+			public MAP.Entry KEY_VALUE_GENERIC next() { return nextEntry(); }
+			public MAP.Entry KEY_VALUE_GENERIC previous() { return previousEntry(); }
+
+			public void set( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); }
+			public void add( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); }
+		}
+
+
+		/** An iterator on a subrange of keys.
+		 *
+		 * <P>This class can iterate in both directions on a subrange of the
+		 * keys of a threaded tree. We simply override the {@link
+		 * java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods (and possibly their
+		 * type-specific counterparts) so that they return keys instead of
+		 * entries.
+		 */
+		private final class SubmapKeyIterator extends SubmapIterator implements KEY_LIST_ITERATOR KEY_GENERIC {
+			public SubmapKeyIterator() { super(); }
+			public SubmapKeyIterator( KEY_GENERIC_TYPE from ) { super( from ); }
+			public KEY_GENERIC_TYPE NEXT_KEY() { return nextEntry().key; }
+			public KEY_GENERIC_TYPE PREV_KEY() { return previousEntry().key; }
+			public void set( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+			public void add( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+#if !#keyclass(Object)
+			public KEY_GENERIC_CLASS next() { return KEY2OBJ( nextEntry().key ); }
+			public KEY_GENERIC_CLASS previous() { return KEY2OBJ( previousEntry().key ); }
+			public void set( KEY_CLASS ok ) { throw new UnsupportedOperationException(); }
+			public void add( KEY_CLASS ok ) { throw new UnsupportedOperationException(); }
+#endif									
+		};
+		  
+		/** An iterator on a subrange of values.
+		 *
+		 * <P>This class can iterate in both directions on the values of a
+		 * subrange of the keys of a threaded tree. We simply override the
+		 * {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods (and possibly their
+		 * type-specific counterparts) so that they return values instead of
+		 * entries.  
+		 */
+		private final class SubmapValueIterator extends SubmapIterator implements VALUE_LIST_ITERATOR VALUE_GENERIC {
+			public VALUE_GENERIC_TYPE NEXT_VALUE() { return nextEntry().value; }
+			public VALUE_GENERIC_TYPE PREV_VALUE() { return previousEntry().value; }
+			public void set( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); }
+			public void add( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); }
+
+#if #values(primitive)
+			public VALUE_GENERIC_CLASS next() { return VALUE2OBJ( nextEntry().value ); }
+			public VALUE_GENERIC_CLASS previous() { return VALUE2OBJ( previousEntry().value ); }
+			public void set( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); }
+			public void add( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); }
+#endif			
+		};
+
+
+	}
+	 
+
+	/** Returns a deep copy of this tree map.
+	 *
+	 * <P>This method performs a deep copy of this tree map; the data stored in the
+	 * map, however, is not cloned. Note that this makes a difference only for object keys and values.
+	 *
+	 * @return a deep copy of this tree map.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public AVL_TREE_MAP KEY_VALUE_GENERIC clone() {
+		AVL_TREE_MAP KEY_VALUE_GENERIC c;
+		try {
+			c = (AVL_TREE_MAP KEY_VALUE_GENERIC)super.clone();
+		}
+		catch(CloneNotSupportedException cantHappen) {
+			throw new InternalError();
+		}
+		
+		c.keys = null;
+		c.values = null;
+		c.entries = null;
+		c.allocatePaths();
+
+		if ( count != 0 ) {
+			// This apparently unfathomable code, too, is derived from GNU libavl.
+			Entry KEY_VALUE_GENERIC e, p, q, rp = new Entry KEY_VALUE_GENERIC(), rq = new Entry KEY_VALUE_GENERIC();
+
+			p = rp;
+			rp.left( tree );
+
+			q = rq;
+			rq.pred( null );
+
+			while( true ) {
+				if ( ! p.pred() ) {
+					e = p.left.clone();
+					e.pred( q.left );
+					e.succ( q );
+					q.left( e );
+
+					p = p.left;
+					q = q.left;
+				}
+				else {
+					while( p.succ() ) {
+						p = p.right;
+
+						if ( p == null ) {
+							q.right = null;
+							c.tree = rq.left;
+
+							c.firstEntry = c.tree;
+							while( c.firstEntry.left != null ) c.firstEntry = c.firstEntry.left;
+							c.lastEntry = c.tree;
+							while( c.lastEntry.right != null ) c.lastEntry = c.lastEntry.right;
+
+							return c;
+						}
+						q = q.right;
+					}
+								
+					p = p.right;
+					q = q.right;
+				}
+
+				if ( ! p.succ() ) {
+					e = p.right.clone();
+					e.succ( q.right );
+					e.pred( q );
+					q.right( e );
+				}
+			}
+		}
+
+		return c;
+	}
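+	/* Cloning sketch (same assumed int/int instantiation): the copy gets its own tree
+	 * structure, so structural changes do not propagate; only object keys/values would be
+	 * shared, and with primitive types the distinction disappears:
+	 *
+	 *   Int2IntAVLTreeMap copy = m.clone();
+	 *   copy.remove( 1 );                 // m still contains the key 1
+	 *   copy.put( 42, 0 );                // m is unaffected
+	 */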
+	 
+
+	private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
+		int n = count;
+		EntryIterator i = new EntryIterator();
+		Entry KEY_VALUE_GENERIC e;
+
+		s.defaultWriteObject();
+
+		while(n-- != 0) {
+			e = i.nextEntry();
+			s.WRITE_KEY( e.key );
+			s.WRITE_VALUE( e.value );
+		}
+	}
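+	/* Note: entries are written in key order (the EntryIterator visits them in ascending
+	 * order), so readObject() below can rebuild a balanced tree directly from the sorted
+	 * stream via readTree(), without performing any rotations. */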
+
+
+	/** Reads the given number of entries from the input stream, returning the corresponding tree. 
+	 *
+	 * @param s the input stream.
+	 * @param n the (positive) number of entries to read.
+	 * @param pred the entry containing the key that precedes the first key in the tree.
+	 * @param succ the entry containing the key that follows the last key in the tree.
+	 * @return the top entry of the tree read from the stream.
+	 */
+	@SuppressWarnings("unchecked")
+	private Entry KEY_VALUE_GENERIC readTree( final java.io.ObjectInputStream s, final int n, final Entry KEY_VALUE_GENERIC pred, final Entry KEY_VALUE_GENERIC succ ) throws java.io.IOException, ClassNotFoundException {
+		if ( n == 1 ) {
+			final Entry KEY_VALUE_GENERIC top = new Entry KEY_VALUE_GENERIC( KEY_GENERIC_CAST s.READ_KEY(), VALUE_GENERIC_CAST s.READ_VALUE() );
+			top.pred( pred );
+			top.succ( succ );
+
+			return top;
+		}
+
+		if ( n == 2 ) {
+			/* We handle this case separately so that recursion will
+			 * always be on nonempty subtrees. */
+			final Entry KEY_VALUE_GENERIC top = new Entry KEY_VALUE_GENERIC( KEY_GENERIC_CAST s.READ_KEY(), VALUE_GENERIC_CAST s.READ_VALUE() );
+			top.right( new Entry KEY_VALUE_GENERIC( KEY_GENERIC_CAST s.READ_KEY(), VALUE_GENERIC_CAST s.READ_VALUE() ) );
+			top.right.pred( top );
+			top.balance( 1 );
+			top.pred( pred );
+			top.right.succ( succ );
+			
+			return top;
+		}
+
+		// The right subtree is the largest one.
+		// The right subtree is the larger one.
+
+		final Entry KEY_VALUE_GENERIC top = new Entry KEY_VALUE_GENERIC();
+
+		top.left( readTree( s, leftN, pred, top ) );
+		
+		top.key = KEY_GENERIC_CAST s.READ_KEY();
+		top.value = VALUE_GENERIC_CAST s.READ_VALUE();
+
+		top.right( readTree( s, rightN, top, succ ) );
+
+		if ( n == ( n & -n ) ) top.balance( 1 ); // Quick test for determining whether n is a power of 2.
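+		/* For instance, n = 8 gives 8 & -8 == 8 (a single bit set), so the test succeeds and
+		 * the right subtree, which holds n / 2 entries, is one level deeper than the left one;
+		 * n = 6 gives 6 & -6 == 2 != 6, so both subtrees have the same height and the balance
+		 * factor stays 0. */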
+
+		return top;
+	}
+
+
+
+	private void readObject( java.io.ObjectInputStream s ) throws java.io.IOException, ClassNotFoundException {
+		s.defaultReadObject();
+		/* The storedComparator is now correctly set, but we must restore
+		   the actualComparator on the fly. */
+		setActualComparator();
+		allocatePaths();
+
+		if ( count != 0 ) {
+			tree = readTree( s, count, null, null );
+			Entry KEY_VALUE_GENERIC e;
+
+			e = tree;
+			while( e.left() != null ) e = e.left();
+			firstEntry = e;
+
+			e = tree;
+			while( e.right() != null ) e = e.right();
+			lastEntry = e;
+		}
+
+		if ( ASSERTS ) checkTree( tree );
+	}
+
+
+	@SuppressWarnings("rawtypes")
+#ifdef ASSERTS_CODE
+	private static int checkTree( Entry e ) {
+		if ( e == null ) return 0;
+
+		final int leftN = checkTree( e.left() ), rightN = checkTree( e.right() );
+		if ( leftN + e.balance() != rightN )
+			throw new AssertionError( "Mismatch between left tree size (" + leftN + "), right tree size (" + rightN + ") and balance (" + e.balance() + ")" );
+
+		return Math.max( leftN , rightN ) + 1;
+	}
+#else
+	private static int checkTree( @SuppressWarnings("unused") Entry e ) { return 0; }
+#endif
+
+#ifdef TEST
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#else
+		return Integer.toBinaryString( r.nextInt() );
+#endif
+	}
+
+	private static VALUE_TYPE genValue() {
+#if #valueclass(Byte) || #valueclass(Short) || #valueclass(Character)
+		return (VALUE_TYPE)(r.nextInt());
+#elif #values(primitive)
+		return r.NEXT_VALUE();
+#elif !#valueclass(Reference) || #keyclass(Reference)
+		return Integer.toBinaryString( r.nextInt() );
+#else
+		return new java.io.Serializable() {};
+#endif
+	}
+
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition p = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, p ).toString();
+	}
+
+	private static void speedTest( int n, boolean comp ) {
+		int i, j;
+		AVL_TREE_MAP m;
+		java.util.TreeMap t;
+		KEY_TYPE k[] = new KEY_TYPE[n];
+		KEY_TYPE nk[] = new KEY_TYPE[n];
+		VALUE_TYPE v[] = new VALUE_TYPE[n];
+		long ms;
+
+		for( i = 0; i < n; i++ ) {
+			k[i] = genKey();
+			nk[i] = genKey();
+			v[i] = genValue();
+		}
+
+		double totPut = 0, totYes = 0, totNo = 0, totIterFor = 0, totIterBack = 0, totRemYes = 0, d, dd;
+
+		if ( comp ) { for( j = 0; j < 20; j++ ) {
+
+			t = new java.util.TreeMap();
+
+			/* We first add all pairs to t. */
+			for( i = 0; i < n;  i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] ) );
+
+			/* Then we remove the first half and put it back. */
+			for( i = 0; i < n/2;  i++ ) t.remove( KEY2OBJ( k[i] ) );
+
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n/2;  i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] ) );
+			d = System.currentTimeMillis() - ms;
+
+			/* Then we remove the other half and put it back again. */
+			ms = System.currentTimeMillis();
+			for( i = n/2; i < n;  i++ ) t.remove( KEY2OBJ( k[i] ) );
+			dd = System.currentTimeMillis() - ms ;
+
+			ms = System.currentTimeMillis();
+			for( i = n/2; i < n;  i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] )  );
+			d += System.currentTimeMillis() - ms;
+			if ( j > 2 ) totPut += n/d; 				
+			System.out.print("Add: " + format( n/d ) +" K/s " );
+
+			/* Then we remove again the first half. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n/2;  i++ ) t.remove( KEY2OBJ( k[i] ) );
+			dd += System.currentTimeMillis() - ms ;
+			if ( j > 2 ) totRemYes += n/dd; 				
+			System.out.print("RemYes: " + format( n/dd ) +" K/s " );
+
+			/* And then we put it back. */
+			for( i = 0; i < n/2;  i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] )  );
+
+			/* We check for pairs in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.containsKey( KEY2OBJ( k[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totYes += d; 				
+			System.out.print("Yes: " + format( d ) +" K/s " );
+
+			/* We check for pairs not in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.containsKey( KEY2OBJ( nk[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totNo += d; 				
+			System.out.print("No: " + format( d ) +" K/s " );
+
+			/* We iterate on t. */
+			ms = System.currentTimeMillis();
+			for( Iterator it = t.entrySet().iterator(); it.hasNext(); it.next() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIterFor += d; 				
+			System.out.print("IterFor: " + format( d ) +" K/s " );
+				
+			System.out.println();
+		}
+
+		System.out.println();
+		System.out.println( "java.util Put: " + format( totPut/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) )+ " K/s IterFor: " + format( totIterFor/(j-3) )  + " K/s"  );
+
+		System.out.println();
+
+		t = null;
+		totPut = totYes = totNo = totIterFor = totIterBack = totRemYes = 0;
+
+		}
+
+		for( j = 0; j < 20; j++ ) {
+
+			m = new AVL_TREE_MAP();
+
+			/* We first add all pairs to m. */
+			for( i = 0; i < n;  i++ ) m.put( k[i], v[i] );
+
+			/* Then we remove the first half and put it back. */
+			for( i = 0; i < n/2;  i++ ) m.remove( k[i] );
+
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n/2;  i++ ) m.put( k[i], v[i] );
+			d = System.currentTimeMillis() - ms;
+
+			/* Then we remove the other half and put it back again. */
+			ms = System.currentTimeMillis();
+			for( i = n/2; i < n;  i++ ) m.remove( k[i] );
+			dd = System.currentTimeMillis() - ms ;
+
+			ms = System.currentTimeMillis();
+			for( i = n/2; i < n;  i++ ) m.put( k[i], v[i]  );
+			d += System.currentTimeMillis() - ms;
+			if ( j > 2 ) totPut += n/d; 				
+			System.out.print("Add: " + format( n/d ) +" K/s " );
+
+			/* Then we remove again the first half. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n/2;  i++ ) m.remove( k[i] );
+			dd += System.currentTimeMillis() - ms ;
+			if ( j > 2 ) totRemYes += n/dd; 				
+			System.out.print("RemYes: " + format( n/dd ) +" K/s " );
+
+			/* And then we put it back. */
+			for( i = 0; i < n/2;  i++ ) m.put( k[i], v[i]  );
+
+			/* We check for pairs in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.containsKey( k[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totYes += d; 				
+			System.out.print("Yes: " + format( d ) +" K/s " );
+
+			/* We check for pairs not in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.containsKey( nk[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totNo += d; 				
+			System.out.print("No: " + format( d ) +" K/s " );
+
+
+			/* We iterate on m. */
+			java.util.ListIterator it = (java.util.ListIterator)m.entrySet().iterator();
+			ms = System.currentTimeMillis();
+			for( it = (java.util.ListIterator)m.entrySet().iterator(); it.hasNext(); it.next() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIterFor += d; 				
+			System.out.print("IterFor: " + format( d ) +" K/s " );
+				
+			/* We iterate back on m. */
+			ms = System.currentTimeMillis();
+			for( ; it.hasPrevious(); it.previous() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIterBack += d; 				
+			System.out.print("IterBack: " + format( d ) +" K/s " );
+				
+			System.out.println();
+		}
+
+
+		System.out.println();
+		System.out.println( "fastutil  Put: " + format( totPut/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s IterFor: " + format( totIterFor/(j-3) ) + " K/s IterBack: " + format( totIterBack/(j-3) ) + "K/s"  );
+
+		System.out.println();
+
+	}
+
+
+	private static boolean valEquals(Object o1, Object o2) {
+		return o1 == null ? o2 == null : o1.equals(o2);
+	}
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	private static Object[] k, v, nk;
+	private static KEY_TYPE kt[];
+	private static KEY_TYPE nkt[];
+	private static VALUE_TYPE vt[];
+	private static AVL_TREE_MAP topMap;
+
+	protected static void testMaps( SORTED_MAP m, SortedMap t, int n, int level ) {
+		long ms;
+		boolean mThrowsIllegal, tThrowsIllegal, mThrowsNoElement, tThrowsNoElement;
+		Object rt = null, rm = null;
+
+		if ( level > 4 ) return;
+				
+
+		/* Now we check that both maps agree on first/last keys. */
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.firstKey();
+		}
+		catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.firstKey();
+		}
+		catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): firstKey() divergence at start in NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+		if ( ! mThrowsNoElement ) ensure( t.firstKey().equals( m.firstKey() ), "Error (" + level + ", " + seed + "): m and t differ at start on their first key (" + m.firstKey() + ", " + t.firstKey() +")" );
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.lastKey();
+		}
+		catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.lastKey();
+		}
+		catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): lastKey() divergence at start in NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+
+
+		if ( ! mThrowsNoElement ) ensure( t.lastKey().equals( m.lastKey() ), "Error (" + level + ", " + seed + "): m and t differ at start on their last key (" + m.lastKey() + ", " + t.lastKey() +")");
+
+
+		/* Now we check that m and t are equal. */
+		if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t);
+
+		ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" );
+		ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" );
+
+
+
+		/* Now we check that m actually holds that data. */
+		for(Iterator i=t.entrySet().iterator(); i.hasNext();  ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			ensure( valEquals(e.getValue(), m.get(e.getKey())), "Error (" + level + ", " + seed + "): m and t differ on an entry ("+e+") after insertion (iterating on t)" );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for(Iterator i=m.entrySet().iterator(); i.hasNext();  ) {
+			Entry e = (Entry)i.next();
+			ensure( valEquals(e.getValue(), t.get(e.getKey())), "Error (" + level + ", " + seed + "): m and t differ on an entry ("+e+") after insertion (iterating on m)" );
+		}
+
+		/* Now we check that m actually holds the same keys. */
+		for(Iterator i=t.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( m.containsKey(o), "Error (" + level + ", " + seed + "): m and t differ on a key ("+o+") after insertion (iterating on t)" );
+			ensure( m.keySet().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a key ("+o+", in keySet()) after insertion (iterating on t)" );
+		}
+
+		/* Now we check that m actually holds the same keys, but iterating on m. */
+		for(Iterator i=m.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( t.containsKey(o), "Error (" + level + ", " + seed + "): m and t differ on a key after insertion (iterating on m)" );
+			ensure( t.keySet().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a key (in keySet()) after insertion (iterating on m)" );
+		}
+
+
+		/* Now we check that m actually holds the same values. */
+		for(Iterator i=t.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( m.containsValue(o), "Error (" + level + ", " + seed + "): m and t differ on a value after insertion (iterating on t)" );
+			ensure( m.values().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a value (in values()) after insertion (iterating on t)" );
+		}
+
+		/* Now we check that m actually holds the same values, but iterating on m. */
+		for(Iterator i=m.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( t.containsValue(o), "Error (" + level + ", " + seed + "): m and t differ on a value after insertion (iterating on m)");
+			ensure( t.values().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a value (in values()) after insertion (iterating on m)");
+		}
+
+		/* Now we check that inquiries about random data give the same answer in m and t. For
+		   m we use the polymorphic method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+				
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+
+			try {
+				m.containsKey(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+
+			try {
+				t.containsKey(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): containsKey() divergence in NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): containsKey() divergence in IllegalArgumentException (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			if ( !mThrowsNoElement && !mThrowsIllegal ) {
+				ensure( m.containsKey(KEY2OBJ(T)) == t.containsKey(KEY2OBJ(T)), "Error (" + level + ", " + seed + "): divergence in keys between t and m (polymorphic method)" );
+					 
+#if #keyclass(Object) && ! ( #values(reference) )
+				if ((m.GET_VALUE(T) != VALUE_NULL) != ((t.get(KEY2OBJ(T)) == null ? VALUE_NULL : VALUE_OBJ2TYPE(t.get(KEY2OBJ(T)))) != VALUE_NULL) || 
+					t.get(KEY2OBJ(T)) != null && 
+					! VALUE2OBJ(m.GET_VALUE(T)).equals(t.get(KEY2OBJ(T)))) 
+#else
+					if ((m.get(T) != VALUE_NULL) != ((t.get(KEY2OBJ(T)) == null ? VALUE_NULL : VALUE_OBJ2TYPE(t.get(KEY2OBJ(T)))) != VALUE_NULL) || 
+						t.get(KEY2OBJ(T)) != null && 
+						! m.get(KEY2OBJ(T)).equals(t.get(KEY2OBJ(T)))) 
+#endif
+						{
+							System.out.println("Error (" + level + ", " + seed + "): divergence between t and m (polymorphic method)");
+							System.exit( 1 );
+						}
+			}
+		}
+
+		/* Again, we check that inquiries about random data give the same answer in m and t, but
+		   for m we use the standard method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+
+			try {
+				m.get(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+
+			try {
+				t.get(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): get() divergence in NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): get() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			if ( !mThrowsNoElement && !mThrowsIllegal ) ensure( valEquals(m.get(KEY2OBJ(T)), t.get(KEY2OBJ(T))), "Error (" + level + ", " + seed + "): divergence between t and m (standard method)" );
+		}
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+			VALUE_TYPE U = genValue();
+
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+
+			try {
+				rm = m.put(KEY2OBJ(T), VALUE2OBJ(U));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+
+			try {
+				rt = t.put(KEY2OBJ(T), VALUE2OBJ(U));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): put() divergence in NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): put() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			if ( !mThrowsNoElement && !mThrowsIllegal ) ensure( valEquals( rm, rt ), "Error (" + level + ", " + seed + "): divergence in put() between t and m (" + rt + ", " + rm + ")" );
+
+			T = genKey();
+				
+
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+
+			try {
+				rm = m.remove(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+
+			try {
+				rt = t.remove(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): remove() divergence in NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): remove() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			if ( !mThrowsNoElement && !mThrowsIllegal ) ensure( valEquals( rm, rt ), "Error (" + level + ", " + seed + "): divergence in remove() between t and m (" + rt + ", " + rm + ")" );
+		}
+
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after removal" );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after removal" );
+
+		/* Now we check that m actually holds the same data. */
+		  
+		for(Iterator i=t.entrySet().iterator(); i.hasNext();  ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			ensure( valEquals(e.getValue(), m.get(e.getKey())), "Error (" + level + ", " + seed + "): m and t differ on an entry ("+e+") after removal (iterating on t)");
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(Iterator i=m.entrySet().iterator(); i.hasNext();  ) {
+			Entry e = (Entry)i.next();
+			ensure( valEquals(e.getValue(), t.get(e.getKey())), "Error (" + level + ", " + seed + "): m and t differ on an entry ("+e+") after removal (iterating on m)" );
+		}
+
+		/* Now we check that m actually holds the same keys. */
+		  
+		for(Iterator i=t.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( m.containsKey(o), "Error (" + level + ", " + seed + "): m and t differ on a key ("+o+") after removal (iterating on t)");
+			ensure( m.keySet().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a key ("+o+", in keySet()) after removal (iterating on t)");
+		}
+
+		/* Now we check that m actually holds the same keys, but iterating on m. */
+		  
+		for(Iterator i=m.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( t.containsKey(o), "Error (" + level + ", " + seed + "): m and t differ on a key after removal (iterating on m)");
+			ensure( t.keySet().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a key (in keySet()) after removal (iterating on m)");
+		}
+
+
+		/* Now we check that m actually holds the same values. */
+		  
+		for(Iterator i=t.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( m.containsValue(o), "Error (" + level + ", " + seed + "): m and t differ on a value after removal (iterating on t)" );
+			ensure( m.values().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a value (in values()) after removal (iterating on t)");
+		}
+
+		/* Now we check that m actually holds the same values, but iterating on m. */
+		  
+		for(Iterator i=m.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( t.containsValue(o), "Error (" + level + ", " + seed + "): m and t differ on a value after removal (iterating on m)");
+			ensure( t.values().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a value (in values()) after removal (iterating on m)");
+		}
+
+		/* Now we check that both maps agree on first/last keys. */
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.firstKey();
+		}
+		catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.firstKey();
+		}
+		catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): firstKey() divergence in NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+		if ( ! mThrowsNoElement ) ensure( t.firstKey().equals( m.firstKey() ), "Error (" + level + ", " + seed + "): m and t differ on their first key (" + m.firstKey() + ", " + t.firstKey() +")" );
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.lastKey();
+		}
+		catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.lastKey();
+		}
+		catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): lastKey() divergence in NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+
+		if ( ! mThrowsNoElement ) ensure( t.lastKey().equals( m.lastKey() ), "Error (" + level + ", " + seed + "): m and t differ on their last key (" + m.lastKey() + ", " + t.lastKey() +")");
+
+		/* Now we check cloning. */
+
+		if ( level == 0 ) {
+			ensure( m.equals( ((AVL_TREE_MAP)m).clone() ), "Error (" + level + ", " + seed + "): m does not equal m.clone()" );
+			ensure( ((AVL_TREE_MAP)m).clone().equals( m ), "Error (" + level + ", " + seed + "): m.clone() does not equal m" );
+		}
+
+		int h = m.hashCode();
+
+
+		/* Now we save and read m. */
+
+		SORTED_MAP m2 = null;
+		  
+		try {
+			java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test");
+			java.io.OutputStream os = new java.io.FileOutputStream(ff);
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os);
+				
+			oos.writeObject(m);
+			oos.close();
+				
+			java.io.InputStream is = new java.io.FileInputStream(ff);
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is);
+				
+			m2 = (SORTED_MAP)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch(Exception e) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+
+#if !#valueclass(Reference)
+		ensure( m2.hashCode() == h, "Error (" + level + ", " + seed + "): hashCode() changed after save/read" );
+		  
+		/* Now we check that m2 actually holds that data. */
+		  
+		ensure( m2.equals(t), "Error (" + level + ", " + seed + "): ! m2.equals( t ) after save/read" );
+		ensure( t.equals(m2), "Error (" + level + ", " + seed + "): ! t.equals( m2 ) after save/read" );
+#else
+		m2.clear();
+		m2.putAll( m );
+#endif
+
+		/* Now we take out of m everything, and check that it is empty. */
+		  
+		for(Iterator i=t.keySet().iterator(); i.hasNext(); ) m2.remove(i.next());
+		  
+		ensure( m2.isEmpty(), "Error (" + level + ", " + seed + "): m2 is not empty (as it should be)" );
+
+		/* Now we play with iterators. */
+
+		{
+			java.util.ListIterator i, j;
+			Map.Entry E, F;
+			Object J;
+			i = (java.util.ListIterator)m.entrySet().iterator(); 
+			j = new java.util.LinkedList( t.entrySet() ).listIterator(); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext()" );
+				ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + level + ", " + seed + "): divergence in hasPrevious()" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					ensure( (E=(Entry)i.next()).getKey().equals( J = (F=(Map.Entry)j.next()).getKey() ), "Error (" + level + ", " + seed + "): divergence in next()" );
+
+					if ( r.nextFloat() < 0.3 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+					else if ( r.nextFloat() < 0.3 ) {
+						Object U = VALUE2OBJ(genValue());
+						E.setValue( U );
+						t.put( F.getKey(), U );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					ensure( (E=(Entry)i.previous()).getKey().equals( J = (F=(Map.Entry)j.previous()).getKey() ), "Error (" + level + ", " + seed + "): divergence in previous()" );
+
+					if ( r.nextFloat() < 0.3 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+					else if ( r.nextFloat() < 0.3 ) {
+						Object U = VALUE2OBJ(genValue());
+						E.setValue( U );
+						t.put( F.getKey(), U );
+					}
+				}
+
+				ensure( i.nextIndex() == j.nextIndex(), "Error (" + level + ", " + seed + "): divergence in nextIndex()" );
+				ensure( i.previousIndex() == j.previousIndex(), "Error (" + level + ", " + seed + "): divergence in previousIndex()" );
+
+			}
+
+		}
+		  
+
+		{
+			boolean badPrevious = false;
+			Object previous = null;
+			it.unimi.dsi.fastutil.BidirectionalIterator i;
+			java.util.ListIterator j;
+			Object I, J;
+			KEY_TYPE from = genKey();
+			j = new java.util.LinkedList( t.keySet() ).listIterator(); 
+			while( j.hasNext() ) {
+				Object k = j.next();
+				if ( ((Comparable)k).compareTo( KEY2OBJ( from ) ) > 0 ) {
+					badPrevious = true;
+					j.previous();
+					break;
+				}
+				previous = k;
+			}
+
+			i = (it.unimi.dsi.fastutil.BidirectionalIterator)((SORTED_SET)m.keySet()).iterator( from ); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" );
+				ensure( i.hasPrevious() == j.hasPrevious() || badPrevious && ( i.hasPrevious() == ( previous != null ) ), "Error (" + level + ", " + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" + badPrevious );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					ensure( ( I = i.next() ).equals( J = j.next() ), "Error (" + level + ", " + seed + "): divergence in next() (" + I + ", " + J + ", iterator with starting point " + from + ")" );
+					//System.err.println("Done next " + I + " " + J + "  " + badPrevious);
+
+					badPrevious = false;
+
+					if ( r.nextFloat() < 0.5 ) {
+						//System.err.println("Removing in next");
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+				else if ( !badPrevious && r.nextFloat() < .2 && i.hasPrevious() ) {
+					ensure( ( I = i.previous() ).equals( J = j.previous() ), "Error (" + level + ", " + seed + "): divergence in previous() (" + I + ", " + J + ", iterator with starting point " + from + ")" );
+
+					if ( r.nextFloat() < 0.5 ) {
+						//System.err.println("Removing in prev");
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+			}
+
+		}
+
+		/* Now we check that m actually holds that data. */
+		  
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after iteration" );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after iteration" );
+
+		/* Now we select a pair of keys and create a submap. */
+
+		if ( ! m.isEmpty() ) {
+			java.util.ListIterator i;
+			Object start = m.firstKey(), end = m.firstKey();
+			for( i = (java.util.ListIterator)m.keySet().iterator(); i.hasNext() && r.nextFloat() < .3; start = end = i.next() );
+			for( ; i.hasNext() && r.nextFloat() < .95; end = i.next() );
+				
+			//System.err.println("Checking subMap from " + start + " to " + end + " (level=" + (level+1) + ")..." );
+			testMaps( (SORTED_MAP)m.subMap( (KEY_CLASS)start, (KEY_CLASS)end ), t.subMap( start, end ), n, level + 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after subMap" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after subMap" );
+
+			//System.err.println("Checking headMap to " + end + " (level=" + (level+1) + ")..." );
+			testMaps( (SORTED_MAP)m.headMap( (KEY_CLASS)end ), t.headMap( end ), n, level + 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after headMap" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after headMap" );
+
+			//System.err.println("Checking tailMap from " + start + " (level=" + (level+1) + ")..." );
+			testMaps( (SORTED_MAP)m.tailMap( (KEY_CLASS)start ), t.tailMap( start ), n, level + 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after tailMap" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after tailMap" );
+		}
+		  
+
+	}
+
+
+	private static void test( int n ) {
+		AVL_TREE_MAP m = new AVL_TREE_MAP();
+		SortedMap t = new java.util.TreeMap();
+		topMap = m;
+		k = new Object[n];
+		v = new Object[n];
+		nk = new Object[n];
+		kt = new KEY_TYPE[n];
+		nkt = new KEY_TYPE[n];
+		vt = new VALUE_TYPE[n];
+
+		for( int i = 0; i < n; i++ ) {
+#if #keyclass(Object)
+			k[i] = kt[i] = genKey();
+			nk[i] = nkt[i] = genKey();
+#else
+			k[i] = new KEY_CLASS( kt[i] = genKey() );
+			nk[i] = new KEY_CLASS( nkt[i] = genKey() );
+#endif
+#if #values(reference)
+			v[i] = vt[i] = genValue();
+#else
+			v[i] = new VALUE_CLASS( vt[i] = genValue() );
+#endif
+		}
+		  
+		/* We add pairs to t. */
+		for( int i = 0; i < n;  i++ ) t.put( k[i], v[i] );
+		  
+		/* We add to m the same data */
+		m.putAll(t);
+
+		testMaps( m, t, n, 0 );
+
+		System.out.println("Test OK");
+		return;
+	}
+
+
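+	/* Harness sketch: this #ifdef TEST main() expects the mode in args[0] ("test",
+	 * "speedTest" or "speedComp"), the number of elements in args[1] and an optional
+	 * random seed in args[2]; e.g., for an assumed int/int instantiation:
+	 *
+	 *   java it.unimi.dsi.fastutil.ints.Int2IntAVLTreeMap test 1000 42
+	 */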
+	public static void main( String args[] ) {
+		int n  = Integer.parseInt(args[1]);
+		if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) );
+
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+
+#endif
+
+
+}
diff --git a/drv/AVLTreeSet.drv b/drv/AVLTreeSet.drv
new file mode 100644
index 0000000..b04084e
--- /dev/null
+++ b/drv/AVLTreeSet.drv
@@ -0,0 +1,2165 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.SortedSet;
+import java.util.NoSuchElementException;
+
+/** A type-specific AVL tree set with a fast, small-footprint implementation.
+ *
+ * <P>The iterators provided by this class are type-specific {@link
+ * it.unimi.dsi.fastutil.BidirectionalIterator bidirectional iterators}.
+ * Moreover, the iterator returned by <code>iterator()</code> can be safely cast
+ * to a type-specific {@linkplain java.util.ListIterator list iterator}.
+ */
+
+public class AVL_TREE_SET KEY_GENERIC extends ABSTRACT_SORTED_SET KEY_GENERIC implements java.io.Serializable, Cloneable, SORTED_SET KEY_GENERIC {
+
+	/** A reference to the root entry. */
+	protected transient Entry KEY_GENERIC tree;
+
+	/** Number of elements in this set. */
+	protected int count;
+
+	/** The entry of the first element of this set. */
+	protected transient Entry KEY_GENERIC firstEntry;
+
+	/** The entry of the last element of this set. */
+	protected transient Entry KEY_GENERIC lastEntry;
+
+	/** This set's comparator, as provided in the constructor. */
+	protected Comparator<? super KEY_GENERIC_CLASS> storedComparator;
+
+	/** This set's actual comparator; it may differ from {@link #storedComparator} because it is
+		always a type-specific comparator, so it could be derived from the former by wrapping. */
+	protected transient KEY_COMPARATOR KEY_SUPER_GENERIC actualComparator;
+
+    private static final long serialVersionUID = -7046029254386353130L;
+
+	private static final boolean ASSERTS = ASSERTS_VALUE;
+
+	{
+		allocatePaths();
+	}
+
+	/** Creates a new empty tree set. 
+	 */
+
+	public AVL_TREE_SET() {
+		tree = null;
+		count = 0;
+	}
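+	/* Iteration sketch (assuming the int instantiation of this template, named here, for
+	 * illustration, IntAVLTreeSet): the returned iterator is bidirectional, as noted in the
+	 * class comment:
+	 *
+	 *   IntAVLTreeSet s = new IntAVLTreeSet( new int[] { 3, 1, 2 } );
+	 *   IntBidirectionalIterator i = s.iterator();
+	 *   while( i.hasNext() ) i.nextInt();          // 1, 2, 3 in ascending order
+	 *   while( i.hasPrevious() ) i.previousInt();  // and back again
+	 */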
+
+	/** Generates the comparator that will actually be used.
+	 *
+	 * <P>When a specific {@link Comparator} is specified and stored in {@link
+	 * #storedComparator}, we must check whether it is type-specific.  If it is
+	 * so, we can use it directly, and we store it in {@link #actualComparator}. Otherwise,
+	 * we generate on-the-fly an anonymous class that wraps the non-specific {@link Comparator}
+	 * and makes it into a type-specific one.
+	 */
+	@SuppressWarnings("unchecked")
+	private void setActualComparator() {
+#if #keyclass(Object)
+		actualComparator = storedComparator;
+#else
+		/* If the provided comparator is already type-specific, we use it. Otherwise,
+		   we use a wrapper anonymous class to fake that it is type-specific. */
+		if ( storedComparator == null || storedComparator instanceof KEY_COMPARATOR ) actualComparator = (KEY_COMPARATOR)storedComparator;
+		else actualComparator =	new KEY_COMPARATOR KEY_GENERIC() {
+				public int compare( KEY_GENERIC_TYPE k1, KEY_GENERIC_TYPE k2 ) {
+					return storedComparator.compare( KEY2OBJ( k1 ), KEY2OBJ( k2 ) );
+				}
+				public int compare( KEY_CLASS ok1, KEY_CLASS ok2 ) {
+					return storedComparator.compare( ok1, ok2 );
+				}
+			};
+#endif
+	}
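+	/* Comparator sketch (same assumed int instantiation): a plain java.util.Comparator passed
+	 * to the constructor is stored in storedComparator and wrapped on the fly into a
+	 * type-specific comparator, while an IntComparator is used directly:
+	 *
+	 *   IntAVLTreeSet s = new IntAVLTreeSet( java.util.Collections.<Integer>reverseOrder() );
+	 */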
+	 
+
+	/** Creates a new empty tree set with the given comparator.
+	 *
+	 * @param c a {@link Comparator} (even better, a type-specific comparator).
+	 */
+
+	public AVL_TREE_SET( final Comparator<? super KEY_GENERIC_CLASS> c ) {
+		this();
+		storedComparator = c;
+		setActualComparator();
+	}
+
+
+	/** Creates a new tree set copying a given set.
+	 *
+	 * @param c a collection to be copied into the new tree set. 
+	 */
+	 
+	public AVL_TREE_SET( final Collection<? extends KEY_GENERIC_CLASS> c ) {
+		this();
+		addAll( c );
+	}
+
+	/** Creates a new tree set copying a given sorted set (and its {@link Comparator}).
+	 *
+	 * @param s a {@link SortedSet} to be copied into the new tree set. 
+	 */
+	 
+	public AVL_TREE_SET( final SortedSet <KEY_GENERIC_CLASS> s ) {
+		this( s.comparator() );
+		addAll( s );
+	}
+
+	/** Creates a new tree set copying a given type-specific collection.
+	 *
+	 * @param c a type-specific collection to be copied into the new tree set. 
+	 */
+	 
+	public AVL_TREE_SET( final COLLECTION KEY_EXTENDS_GENERIC c ) {
+		this();
+		addAll( c );
+	}
+
+	/** Creates a new tree set copying a given type-specific sorted set (and its {@link Comparator}).
+	 *
+	 * @param s a type-specific sorted set to be copied into the new tree set. 
+	 */
+	 
+	public AVL_TREE_SET( final SORTED_SET KEY_GENERIC s ) {
+		this( s.comparator() );
+		addAll( s );
+	}
+
+
+	/** Creates a new tree set using elements provided by a type-specific iterator.
+	 *
+	 * @param i a type-specific iterator whose elements will fill the set.
+	 */
+	 
+	public AVL_TREE_SET( final KEY_ITERATOR KEY_EXTENDS_GENERIC i ) {
+		while( i.hasNext() ) add( i.NEXT_KEY() );
+	}
+
+
+	/** Creates a new tree set using elements provided by an iterator.
+	 *
+	 * @param i an iterator whose elements will fill the set.
+	 */
+	 
+	@SuppressWarnings("unchecked")
+	public AVL_TREE_SET( final Iterator<? extends KEY_GENERIC_CLASS> i ) {
+		this( ITERATORS.AS_KEY_ITERATOR( i ) );
+	}
+
+
+	/** Creates a new tree set and fills it with the elements of a given array using a given {@link Comparator}.
+	 *
+	 * @param a an array whose elements will be used to fill the set.
+	 * @param offset the first element to use.
+	 * @param length the number of elements to use.
+	 * @param c a {@link Comparator} (even better, a type-specific comparator).
+	 */
+	 
+	public AVL_TREE_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length, final Comparator<? super KEY_GENERIC_CLASS> c ) {
+		this( c );
+		ARRAYS.ensureOffsetLength( a, offset, length );
+		for( int i = 0; i < length; i++ ) add( a[ offset + i ] );
+	}
+
+
+	/** Creates a new tree set and fills it with the elements of a given array.
+	 *
+	 * @param a an array whose elements will be used to fill the set.
+	 * @param offset the first element to use.
+	 * @param length the number of elements to use.
+	 */
+	 
+	public AVL_TREE_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length ) {
+		this( a, offset, length, null );
+	}
+
+
+	/** Creates a new tree set copying the elements of an array.
+	 *
+	 * @param a an array to be copied into the new tree set. 
+	 */
+
+	public AVL_TREE_SET( final KEY_GENERIC_TYPE[] a ) {
+		this();
+		int i = a.length;
+		while( i-- != 0 ) add( a[ i ] );
+	}
+
+
+	/** Creates a new tree set copying the elements of an array using a given {@link Comparator}.
+	 *
+	 * @param a an array to be copied into the new tree set. 
+	 * @param c a {@link Comparator} (even better, a type-specific comparator).
+	 */
+	 
+	public AVL_TREE_SET( final KEY_GENERIC_TYPE[] a, final Comparator<? super KEY_GENERIC_CLASS> c ) {
+		this( c );
+		int i = a.length;
+		while( i-- != 0 ) add( a[ i ] );
+	}
+
+
+
+
+	/*
+	 * The following methods implement some basic building blocks used by
+	 * all accessors. They are (and should be kept) identical to those used in AVLTreeMap.drv.
+	 *
+	 * The add()/remove() code is derived from Ben Pfaff's GNU libavl
+	 * (http://www.msu.edu/~pfaffben/avl/). If you want to understand what's
+	 * going on, you should have a look at the literate code contained therein
+	 * first.  
+	 */
+
+
+	/** Compares two keys in the right way. 
+	 *
+	 * <P>This method uses the {@link #actualComparator} if it is non-<code>null</code>.
+	 * Otherwise, it resorts to primitive type comparisons or to {@link Comparable#compareTo(Object) compareTo()}.
+	 *
+	 * @param k1 the first key.
+	 * @param k2 the second key.
+	 * @return a number smaller than, equal to or greater than 0, as usual
+	 * (i.e., when k1 < k2, k1 = k2 or k1 > k2, respectively).
+	 */
+	 
+	@SuppressWarnings("unchecked")
+	final int compare( final KEY_GENERIC_TYPE k1, final KEY_GENERIC_TYPE k2 ) {
+		return actualComparator == null ? KEY_CMP( k1, k2 ) : actualComparator.compare( k1, k2 );
+	}
+
+
+
+	/** Returns the entry corresponding to the given key, if it is in the tree; <code>null</code>, otherwise.
+	 *
+	 * @param k the key to search for.
+	 * @return the corresponding entry, or <code>null</code> if no entry with the given key exists.
+	 */
+
+	private Entry KEY_GENERIC findKey( final KEY_GENERIC_TYPE k ) {
+		Entry KEY_GENERIC e = tree;
+		int cmp;
+		  
+		while ( e != null && ( cmp = compare( k, e.key ) ) != 0 ) 
+			e = cmp < 0 ? e.left() : e.right();
+		  
+		return e;
+	}
+
+	/** Locates a key.
+	 *
+	 * @param k a key.
+	 * @return the last entry on a search for the given key; this will be
+	 * the entry with the given key, if it is present; otherwise, it will be the entry with either the smallest greater key or the greatest smaller key.
+	 */
+
+	final Entry KEY_GENERIC locateKey( final KEY_GENERIC_TYPE k ) {
+		Entry KEY_GENERIC e = tree, last = tree;
+		int cmp = 0;
+		  
+		while ( e != null && ( cmp = compare( k, e.key ) ) != 0 ) {
+			last = e;
+			e = cmp < 0 ? e.left() : e.right();
+		}
+		  
+		return cmp == 0 ? e : last;
+	}
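+	/* Illustrative example (added): in a set containing 1, 3 and 5, locateKey( 3 )
+	   returns the entry holding 3, while locateKey( 4 ) returns the entry at which the
+	   unsuccessful search stopped, that is, depending on the tree shape, either 3 (the
+	   greatest smaller key) or 5 (the smallest greater key). Callers below compare the
+	   returned key with the argument to tell the two cases apart. */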
+
+
+	/** This vector remembers the path followed during the current insertion. It suffices for
+		about 2<sup>32</sup> entries. */
+	private transient boolean dirPath[];
+
+	private void allocatePaths() {
+		dirPath = new boolean[ 48 ];
+	}
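+	/* Note (added): 48 direction slots are ample, since the height of an AVL tree on
+	   n nodes is at most about 1.44 log2(n); even 2^32 elements need fewer than 47 levels. */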
+
+
+	public boolean add( final KEY_GENERIC_TYPE k ) {
+
+		if ( tree == null ) { // The case of the empty tree is treated separately.
+			count++;
+			tree = lastEntry = firstEntry = new Entry KEY_GENERIC( k );
+		}
+		else {
+			Entry KEY_GENERIC p = tree, q = null, y = tree, z = null, e = null, w = null;
+			int cmp, i = 0;
+
+			while( true ) {
+				if ( ( cmp = compare( k, p.key ) ) == 0 ) return false;
+					 
+				if ( p.balance() != 0 ) {
+					i = 0;
+					z = q;
+					y = p;
+				}
+					 
+				if ( dirPath[ i++ ] = cmp > 0 ) {
+					if ( p.succ() ) {
+						count++;
+						e = new Entry KEY_GENERIC( k );
+								
+						if ( p.right == null ) lastEntry = e;
+								
+						e.left = p;
+						e.right = p.right;
+								
+						p.right( e );
+								
+						break;
+					}
+
+					q = p;
+					p = p.right;
+				}
+				else {
+					if ( p.pred() ) {
+						count++;
+						e = new Entry KEY_GENERIC( k );
+								
+						if ( p.left == null ) firstEntry = e;
+								
+						e.right = p;
+						e.left = p.left;
+								
+						p.left( e );
+
+						break;
+					}
+
+					q = p;
+					p = p.left;
+				}
+			}
+
+			p = y;
+			i = 0;
+
+			while( p != e ) {
+				if ( dirPath[ i ] ) p.incBalance();
+				else p.decBalance();
+
+				p = dirPath[ i++ ] ? p.right : p.left;
+			}
+				
+			if ( y.balance() == -2 ) {
+				Entry KEY_GENERIC x = y.left;
+
+				if ( x.balance() == -1 ) {
+					w = x;
+					if ( x.succ() ) {
+						x.succ( false );
+						y.pred( x );
+					}
+					else y.left = x.right;
+
+					x.right = y;
+					x.balance( 0 );
+					y.balance( 0 );
+				}
+				else {
+					if ( ASSERTS ) assert x.balance() == 1;
+
+					w = x.right;
+					x.right = w.left;
+					w.left = x;
+					y.left = w.right;
+					w.right = y;
+					if ( w.balance() == -1 ) {
+						x.balance( 0 );
+						y.balance( 1 );
+					}
+					else if ( w.balance() == 0 ) {
+						x.balance( 0 );
+						y.balance( 0 );
+					}
+					else {
+						x.balance( -1 );
+						y.balance( 0 );
+					}
+					w.balance( 0 );
+
+
+					if ( w.pred() ) {
+						x.succ( w );
+						w.pred( false );
+					}
+					if ( w.succ() ) {
+						y.pred( w );
+						w.succ( false );
+					}
+
+				}
+			}
+			else if ( y.balance() == +2 ) {
+				Entry KEY_GENERIC x = y.right;
+
+				if ( x.balance() == 1 ) {
+					w = x;
+					if ( x.pred() ) {
+						x.pred( false );
+						y.succ( x );
+					}
+					else y.right = x.left;
+
+					x.left = y;
+					x.balance( 0 );
+					y.balance( 0 );
+				}
+				else {
+					if ( ASSERTS ) assert x.balance() == -1;
+
+					w = x.left;
+					x.left = w.right;
+					w.right = x;
+					y.right = w.left;
+					w.left = y;
+					if ( w.balance() == 1 ) {
+						x.balance( 0 );
+						y.balance( -1 );
+					}
+					else if ( w.balance() == 0 ) {
+						x.balance( 0 );
+						y.balance( 0 );
+					}
+					else {
+						x.balance( 1 );
+						y.balance( 0 );
+					}
+					w.balance( 0 );
+
+
+					if ( w.pred() ) {
+						y.succ( w );
+						w.pred( false );
+					}
+					if ( w.succ() ) {
+						x.pred( w );
+						w.succ( false );
+					}
+
+				}
+			}
+			else return true;
+
+			if ( z == null ) tree = w;
+			else {
+				if ( z.left == y ) z.left = w;
+				else z.right = w;
+			}
+		}
+
+		if ( ASSERTS ) checkTree( tree );
+		return true;
+	}
+	 
+	 
+	 
+
+	/** Finds the parent of an entry.
+	 *
+	 * @param e a node of the tree.
+	 * @return the parent of the given node, or <code>null</code> for the root.
+	 */
+
+	private Entry KEY_GENERIC parent( final Entry KEY_GENERIC e ) {
+		if ( e == tree ) return null;
+
+		Entry KEY_GENERIC x, y, p;
+		x = y = e;
+
+		while( true ) {
+			if ( y.succ() ) {
+				p = y.right;
+				if ( p == null || p.left != e ) {
+					while( ! x.pred() ) x = x.left;
+					p = x.left;
+				}
+				return p;
+			}
+			else if ( x.pred() ) {
+				p = x.left;
+				if ( p == null || p.right != e ) {
+					while( ! y.succ() ) y = y.right;
+					p = y.right;
+				}
+				return p;
+			}
+
+			x = x.left;
+			y = y.right;
+		}
+	}
+ 
+
+	@SuppressWarnings("unchecked")
+	public boolean remove( final KEY_TYPE k ) {
+		if ( tree == null ) return false;
+
+		int cmp;
+		Entry KEY_GENERIC p = tree, q = null;
+		boolean dir = false;
+		final KEY_GENERIC_TYPE kk = KEY_GENERIC_CAST k;
+
+		while( true ) {
+			if ( ( cmp = compare( kk, p.key ) ) == 0 ) break;
+			else if ( dir = cmp > 0 ) {
+				q = p;
+				if ( ( p = p.right() ) == null ) return false;
+			}
+			else {
+				q = p;
+				if ( ( p = p.left() ) == null ) return false;
+			}
+		}
+
+		if ( p.left == null ) firstEntry = p.next();
+		if ( p.right == null ) lastEntry = p.prev();
+
+		if ( p.succ() ) {
+			if ( p.pred() ) {
+				if ( q != null ) {
+					if ( dir ) q.succ( p.right );
+					else q.pred( p.left );
+				}
+				else tree = dir ? p.right : p.left;
+			}
+			else {
+				p.prev().right = p.right;
+
+				if ( q != null ) {
+					if ( dir ) q.right = p.left;
+					else q.left = p.left;
+				}
+				else tree = p.left;
+			}
+		}
+		else {
+			Entry KEY_GENERIC r = p.right;
+
+			if ( r.pred() ) {
+				r.left = p.left;
+				r.pred( p.pred() );
+				if ( ! r.pred() ) r.prev().right = r;
+				if ( q != null ) {
+					if ( dir ) q.right = r;
+					else q.left = r;
+				}
+				else tree = r;
+
+				r.balance( p.balance() );
+				q = r;
+				dir = true;
+
+			}
+			else {
+				Entry KEY_GENERIC s;
+
+				while( true ) {
+					s = r.left;
+					if ( s.pred() ) break;
+					r = s;
+				}
+
+				if ( s.succ() ) r.pred( s );
+				else r.left = s.right;
+
+				s.left = p.left;
+
+				if ( ! p.pred() ) {
+					p.prev().right = s;
+					s.pred( false );
+				}
+
+				s.right = p.right;
+				s.succ( false );
+								
+				if ( q != null ) {
+					if ( dir ) q.right = s;
+					else q.left = s;
+				}
+				else tree = s;
+
+				s.balance( p.balance() );
+				q = r;
+				dir = false;
+			}
+		}
+
+		Entry KEY_GENERIC y;
+
+		while( q != null ) {
+			y = q;
+			q = parent( y );
+
+			if ( ! dir ) {
+				dir = q != null && q.left != y;
+				y.incBalance();
+
+				if ( y.balance() == 1 ) break;
+				else if ( y.balance() == 2 ) {
+
+					Entry KEY_GENERIC x = y.right;
+					if ( ASSERTS ) assert x != null;
+									 
+					if ( x.balance() == -1 ) {
+						Entry KEY_GENERIC w;
+								
+						if ( ASSERTS ) assert x.balance() == -1;
+
+						w = x.left;
+						x.left = w.right;
+						w.right = x;
+						y.right = w.left;
+						w.left = y;
+
+						if ( w.balance() == 1 ) {
+							x.balance( 0 );
+							y.balance( -1 );
+						}
+						else if ( w.balance() == 0 ) {
+							x.balance( 0 );
+							y.balance( 0 );
+						}
+						else {
+							if ( ASSERTS ) assert w.balance() == -1;
+
+							x.balance( 1 );
+							y.balance( 0 );
+						}
+
+						w.balance( 0 );
+
+						if ( w.pred() ) {
+							y.succ( w );
+							w.pred( false );
+						}
+						if ( w.succ() ) {
+							x.pred( w );
+							w.succ( false );
+						}
+
+						if ( q != null ) {
+							if ( dir ) q.right = w;
+							else q.left = w;
+						}
+						else tree = w;
+					}
+					else {
+						if ( q != null ) {
+							if ( dir ) q.right = x;
+							else q.left = x;
+						}
+						else tree = x;
+										  
+						if ( x.balance() == 0 ) {
+							y.right = x.left;
+							x.left = y;
+							x.balance( -1 );
+							y.balance( +1 );
+							break;
+						}
+
+						if ( ASSERTS ) assert x.balance() == 1;
+
+						if ( x.pred() ) {
+							y.succ( true );
+							x.pred( false );
+						}
+						else y.right = x.left;
+
+						x.left = y;
+						y.balance( 0 );
+						x.balance( 0 );
+					}
+				}
+			}
+			else {
+				dir = q != null && q.left != y;
+				y.decBalance();
+
+				if ( y.balance() == -1 ) break;
+				else if ( y.balance() == -2 ) {
+
+					Entry KEY_GENERIC x = y.left;
+					if ( ASSERTS ) assert x != null;
+									 
+					if ( x.balance() == 1 ) {
+						Entry KEY_GENERIC w;
+								
+						if ( ASSERTS ) assert x.balance() == 1;
+
+						w = x.right;
+						x.right = w.left;
+						w.left = x;
+						y.left = w.right;
+						w.right = y;
+
+						if ( w.balance() == -1 ) {
+							x.balance( 0 );
+							y.balance( 1 );
+						}
+						else if ( w.balance() == 0 ) {
+							x.balance( 0 );
+							y.balance( 0 );
+						}
+						else {
+							if ( ASSERTS ) assert w.balance() == 1;
+
+							x.balance( -1 );
+							y.balance( 0 );
+						}
+
+						w.balance( 0 );
+
+						if ( w.pred() ) {
+							x.succ( w );
+							w.pred( false );
+						}
+						if ( w.succ() ) {
+							y.pred( w );
+							w.succ( false );
+						}
+
+						if ( q != null ) {
+							if ( dir ) q.right = w;
+							else q.left = w;
+						}
+						else tree = w;
+					}
+					else {
+						if ( q != null ) {
+							if ( dir ) q.right = x;
+							else q.left = x;
+						}
+						else tree = x;
+										  
+						if ( x.balance() == 0 ) {
+							y.left = x.right;
+							x.right = y;
+							x.balance( +1 );
+							y.balance( -1 );
+							break;
+						}
+
+						if ( ASSERTS ) assert x.balance() == -1;
+
+						if ( x.succ() ) {
+							y.pred( true );
+							x.succ( false );
+						}
+						else y.left = x.right;
+
+						x.right = y;
+						y.balance( 0 );
+						x.balance( 0 );
+					}
+				}  
+			}
+		}
+
+		count--;
+		if ( ASSERTS ) checkTree( tree );
+		return true;
+	}
+
+	@SuppressWarnings("unchecked")
+	public boolean contains( final KEY_TYPE k ) {
+		return findKey( KEY_GENERIC_CAST k ) != null;
+	}
+
+#if #keyclass(Object)
+	public K get( final KEY_TYPE k ) {
+		final Entry KEY_GENERIC entry = findKey( KEY_GENERIC_CAST k );
+		return entry == null ? null : entry.getKey();
+	}
+#endif
+
+	public void clear() {
+		count = 0;
+		tree = null;
+		firstEntry = lastEntry = null;
+	}
+
+	 
+	/** This class represents an entry in a tree set.
+	 *
+	 * <P>The only "metadata" we use, i.e., {@link Entry#info}, stores
+	 * information about balance, predecessor status and successor status.
+	 *
+	 * <P>Note that since the class is recursive, it can equivalently be
+	 * considered a tree.
+	 */
+
+	private static final class Entry KEY_GENERIC implements Cloneable {
+		/** If the bit marked by this mask is set, {@link #right} points to a successor. */
+		private final static int SUCC_MASK = 1 << 31;
+		/** If the bit marked by this mask is set, {@link #left} points to a predecessor. */
+		private final static int PRED_MASK = 1 << 30;
+		/** The bits in this mask hold the node balance info. You can get it just by casting to byte. */
+		private final static int BALANCE_MASK = 0xFF;
+		/** The key of this entry. */
+		KEY_GENERIC_TYPE key;
+		/** The pointers to the left and right subtrees. */
+		Entry KEY_GENERIC left, right;
+		/** This integer holds different information in different bits (see {@link #SUCC_MASK}, {@link #PRED_MASK} and {@link #BALANCE_MASK}). */
+		int info;
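+		/* Worked example (added): a freshly created node has
+		   info == SUCC_MASK | PRED_MASK (see the constructor below), i.e., both pointers
+		   are threads and the balance byte is 0. Calling balance( -1 ) stores 0xFF in the
+		   low byte, and balance() recovers -1 through the sign extension of the (byte) cast. */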
+
+		Entry() {}
+		  
+		/** Creates a new entry with the given key.
+		 *
+		 * @param k a key.
+		 */
+		Entry( final KEY_GENERIC_TYPE k ) {
+			this.key = k;
+			info = SUCC_MASK | PRED_MASK;
+		}
+		  
+		/** Returns the left subtree. 
+		 *
+		 * @return the left subtree (<code>null</code> if the left
+		 * subtree is empty).
+		 */
+		Entry KEY_GENERIC left() {
+			return ( info & PRED_MASK ) != 0 ? null : left;
+		}
+		  
+		/** Returns the right subtree. 
+		 *
+		 * @return the right subtree (<code>null</code> if the right
+		 * subtree is empty).
+		 */
+		Entry KEY_GENERIC right() {
+			return ( info & SUCC_MASK ) != 0 ? null : right;
+		}
+		  
+		/** Checks whether the left pointer is really a predecessor.
+		 * @return true if the left pointer is a predecessor.
+		 */
+		boolean pred() {
+			return ( info & PRED_MASK ) != 0;
+		}
+		  
+		/** Checks whether the right pointer is really a successor.
+		 * @return true if the right pointer is a successor.
+		 */
+		boolean succ() {
+			return ( info & SUCC_MASK ) != 0;
+		}
+		  
+		/** Sets whether the left pointer is really a predecessor.
+		 * @param pred if true then the left pointer will be considered a predecessor.
+		 */
+		void pred( final boolean pred ) {
+			if ( pred ) info |= PRED_MASK;
+			else info &= ~PRED_MASK;
+		}
+		  
+		/** Sets whether the right pointer is really a successor.
+		 * @param succ if true then the right pointer will be considered a successor.
+		 */
+		void succ( final boolean succ ) {
+			if ( succ ) info |= SUCC_MASK;
+			else info &= ~SUCC_MASK;
+		}
+		  
+		/** Sets the left pointer to a predecessor.
+		 * @param pred the predecessor.
+		 */
+		void pred( final Entry KEY_GENERIC pred ) {
+			info |= PRED_MASK;
+			left = pred;
+		}
+		  
+		/** Sets the right pointer to a successor.
+		 * @param succ the successor.
+		 */
+		void succ( final Entry KEY_GENERIC succ ) {
+			info |= SUCC_MASK;
+			right = succ;
+		}
+		  
+		/** Sets the left pointer to the given subtree.
+		 * @param left the new left subtree.
+		 */
+		void left( final Entry KEY_GENERIC left ) {
+			info &= ~PRED_MASK;
+			this.left = left;
+		}
+		  
+		/** Sets the right pointer to the given subtree.
+		 * @param right the new right subtree.
+		 */
+		void right( final Entry KEY_GENERIC right ) {
+			info &= ~SUCC_MASK;
+			this.right = right;
+		}
+		  
+		/** Returns the current balance of this node.
+		 * @return the current balance of this node.
+		 */
+		int balance() {
+			return (byte)info;
+		}
+
+		/** Sets the balance of this node.
+		 * @param level the new balance of this node.
+		 */
+		void balance( int level ) {
+			info &= ~BALANCE_MASK;
+			info |= ( level & BALANCE_MASK );
+		}
+
+		/** Increments the balance of this node. */
+		void incBalance() {
+			info = info & ~BALANCE_MASK | ( (byte)info + 1 ) & 0xFF;
+		}
+		  
+		/** Decrements the balance of this node. */
+		protected void decBalance() {
+			info = info & ~BALANCE_MASK | ( (byte)info - 1 ) & 0xFF;
+		}
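+		/* Arithmetic sketch (added): on a node with balance -1 (low byte 0xFF),
+		   incBalance() computes ( (byte)0xFF + 1 ) & 0xFF == 0, restoring balance 0;
+		   on a node with balance 0 it stores 1. In both cases info & ~BALANCE_MASK
+		   leaves the SUCC_MASK and PRED_MASK bits untouched. */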
+		  
+		/** Computes the next entry in the set order.
+		 *
+		 * @return the next entry (<code>null</code> if this is the last entry).
+		 */
+
+		Entry KEY_GENERIC next() {
+			Entry KEY_GENERIC next = this.right;
+			if ( ( info & SUCC_MASK ) == 0 ) while ( ( next.info & PRED_MASK ) == 0 ) next = next.left;
+			return next;
+		}
+
+		/** Computes the previous entry in the set order.
+		 *
+		 * @return the previous entry (<code>null</code> if this is the first entry).
+		 */
+
+		Entry KEY_GENERIC prev() {
+			Entry KEY_GENERIC prev = this.left;
+			if ( ( info & PRED_MASK ) == 0 ) while ( ( prev.info & SUCC_MASK ) == 0 ) prev = prev.right;
+			return prev;
+		}
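+		/* Note (added): these methods exploit the threading. In next(), if the SUCC bit
+		   is set, right already points straight at the in-order successor; otherwise right
+		   is a real child, and we descend along left pointers until a set PRED bit marks
+		   the leftmost node of that subtree. prev() is symmetric. */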
+
+		@SuppressWarnings("unchecked")
+		public Entry KEY_GENERIC clone() {
+			Entry KEY_GENERIC c;
+			try {
+				c = (Entry KEY_GENERIC)super.clone();
+			}
+			catch(CloneNotSupportedException cantHappen) {
+				throw new InternalError();
+			}
+
+			c.key = key;
+			c.info = info;
+
+			return c;
+		}
+
+		public boolean equals( final Object o ) {
+			if (!(o instanceof Entry)) return false;
+			Entry KEY_GENERIC_WILDCARD e = (Entry KEY_GENERIC_WILDCARD)o;
+				
+			return KEY_EQUALS(key, e.key);
+		}
+		  
+		public int hashCode() {
+			return KEY2JAVAHASH(key);
+		}
+		  
+		  
+		public String toString() {
+			return String.valueOf( key );
+		}
+		  
+		/*
+		  public void prettyPrint() {
+		  prettyPrint(0);
+		  }
+
+		  public void prettyPrint(int level) {
+		  if ( pred() ) {
+		  for (int i = 0; i < level; i++)
+		  System.err.print("  ");
+		  System.err.println("pred: " + left );
+		  }
+		  else if (left != null)
+		  left.prettyPrint(level +1 );
+		  for (int i = 0; i < level; i++)
+		  System.err.print("  ");
+		  System.err.println(key + " (" + level() + ")");
+		  if ( succ() ) {
+		  for (int i = 0; i < level; i++)
+		  System.err.print("  ");
+		  System.err.println("succ: " + right );
+		  }
+		  else if (right != null)
+		  right.prettyPrint(level + 1);
+		  }
+		*/
+	}
+	 
+	/*
+	  public void prettyPrint() {
+	  System.err.println("size: " + count);
+	  if (tree != null) tree.prettyPrint();
+	  }
+	*/
+	
+	public int size() {
+		return count;
+	}
+	 
+	public boolean isEmpty() {
+		return count == 0;
+	}
+	 
+	public KEY_GENERIC_TYPE FIRST() {
+		if ( tree == null ) throw new NoSuchElementException();
+		return firstEntry.key;
+	}
+	 
+	public KEY_GENERIC_TYPE LAST() {
+		if ( tree == null ) throw new NoSuchElementException();
+		return lastEntry.key;
+	}
+	 
+
+	/** An iterator on the whole range.
+	 *
+	 * <P>This class can iterate in both directions on a threaded tree.
+	 */
+
+	private class SetIterator extends KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC {
+		/** The entry that will be returned by the next call to {@link java.util.ListIterator#previous()} (or <code>null</code> if no previous entry exists). */
+		Entry KEY_GENERIC prev;
+		/** The entry that will be returned by the next call to {@link java.util.ListIterator#next()} (or <code>null</code> if no next entry exists). */
+		Entry KEY_GENERIC next;
+		/** The last entry that was returned (or <code>null</code> if we did not iterate or used {@link #remove()}). */
+		Entry KEY_GENERIC curr;
+		/** The current index (in the sense of a {@link java.util.ListIterator}). Note that this value is not meaningful when this {@link SetIterator} has been created using the nonempty constructor.*/
+		int index = 0;
+		  
+		SetIterator() {
+			next = firstEntry;
+		}
+
+		SetIterator( final KEY_GENERIC_TYPE k ) {
+			if ( ( next = locateKey( k ) ) != null ) {
+				if ( compare( next.key, k ) <= 0 ) {
+					prev = next;
+					next = next.next();
+				}
+				else prev = next.prev();
+			}
+		}
+
+		public boolean hasNext() { return next != null; }
+		public boolean hasPrevious() { return prev != null; }
+
+		void updateNext() {
+			next = next.next();
+		}
+
+		Entry KEY_GENERIC nextEntry() {
+			if ( ! hasNext() ) throw new NoSuchElementException();
+			curr = prev = next;
+			index++;
+			updateNext();
+			return curr;
+		}
+
+		public KEY_GENERIC_TYPE NEXT_KEY() { return nextEntry().key; }
+		public KEY_GENERIC_TYPE PREV_KEY() { return previousEntry().key; }
+
+		void updatePrevious() {
+			prev = prev.prev();
+		}
+
+		Entry KEY_GENERIC previousEntry() {
+			if ( ! hasPrevious() ) throw new NoSuchElementException();
+			curr = next = prev;
+			index--;
+			updatePrevious();
+			return curr;
+		}
+
+		public int nextIndex() {
+			return index;
+		}
+
+		public int previousIndex() {
+			return index - 1;
+		}
+
+		public void remove() {
+			if ( curr == null ) throw new IllegalStateException();
+			/* If the last operation was a next(), we are removing an entry that precedes
+			   the current index, and thus we must decrement it. */
+			if ( curr == prev ) index--;
+			next = prev = curr;
+			updatePrevious();
+			updateNext();
+			AVL_TREE_SET.this.remove( curr.key );
+			curr = null;
+		}
+	}
+
+
+	public KEY_BIDI_ITERATOR KEY_GENERIC iterator() {
+		return new SetIterator();
+	}
+
+	public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) {
+		return new SetIterator( from );
+	}
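+	/* Illustrative usage (added, assuming the int specialization): for a set containing
+	   1, 3 and 5, iterator( 3 ) is positioned between 3 and 5, so a first call to nextInt()
+	   returns 5, while a first call to previousInt() returns 3. This follows from the
+	   SetIterator( k ) constructor above, which leaves prev at the greatest element <= k
+	   and next at its successor. */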
+
+	public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() {
+		return actualComparator;
+	}
+
+	public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_TYPE to ) {
+		return new Subset( KEY_NULL, true, to, false );
+	}
+
+	public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_TYPE from ) {
+		return new Subset( from, false, KEY_NULL, true );
+	}
+
+	public SORTED_SET KEY_GENERIC subSet( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) {
+		return new Subset( from, false, to, false );
+	}
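+	/* Illustrative semantics (added): as in java.util.SortedSet, ranges are half-open.
+	   Assuming the int specialization, if the set contains 1, 2, 3 and 4, then
+	   subSet( 2, 4 ) views { 2, 3 }, headSet( 3 ) views { 1, 2 } and tailSet( 3 )
+	   views { 3, 4 }; see the in() check of the Subset class below. */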
+
+
+
+	/** A subset with given range.
+	 *
+	 * <P>This class represents a subset. One has to specify the left/right
+	 * limits (which can be set to -∞ or ∞). Since the subset is a
+	 * view on the set, at a given moment the limits of
+	 * the range might no longer be in the main set. Thus, things such as
+	 * {@link java.util.SortedSet#first()} or {@link java.util.SortedSet#size()} must always be computed
+	 * on the fly.
+	 */
+	private final class Subset extends ABSTRACT_SORTED_SET KEY_GENERIC implements java.io.Serializable, SORTED_SET KEY_GENERIC {
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		/** The start of the subset range, unless {@link #bottom} is true. */
+		KEY_GENERIC_TYPE from;
+		/** The end of the subset range, unless {@link #top} is true. */
+		KEY_GENERIC_TYPE to;
+		/** If true, the subset range starts from -∞. */
+		boolean bottom;
+		/** If true, the subset range goes to ∞. */
+		boolean top;
+		  
+		/** Creates a new subset with given key range.
+		 *
+		 * @param from the start of the subset range.
+		 * @param bottom if true, the first parameter is ignored and the range starts from -∞.
+		 * @param to the end of the subset range.
+		 * @param top if true, the third parameter is ignored and the range goes to ∞.
+		 */
+		public Subset( final KEY_GENERIC_TYPE from, final boolean bottom, final KEY_GENERIC_TYPE to, final boolean top ) {
+			if ( ! bottom && ! top && AVL_TREE_SET.this.compare( from, to ) > 0 ) throw new IllegalArgumentException( "Start element (" + from  + ") is larger than end element (" + to + ")" );
+
+			this.from = from;
+			this.bottom = bottom;
+			this.to = to;
+			this.top = top;
+		}
+
+		public void clear() {
+			final SubsetIterator i = new SubsetIterator();
+			while( i.hasNext() ) {
+				i.next();
+				i.remove();
+			}
+		}
+
+		/** Checks whether a key is in the subset range.
+		 * @param k a key.
+		 * @return true if the key is in the subset range.
+		 */
+		final boolean in( final KEY_GENERIC_TYPE k ) {
+			return ( bottom || AVL_TREE_SET.this.compare( k, from ) >= 0 ) &&
+				( top || AVL_TREE_SET.this.compare( k, to ) < 0 );
+		}
+
+		@SuppressWarnings("unchecked")
+		public boolean contains( final KEY_TYPE k ) {
+			return in( KEY_GENERIC_CAST k ) && AVL_TREE_SET.this.contains( k );
+		}
+
+		public boolean add( final KEY_GENERIC_TYPE  k ) {
+			if ( ! in( k ) ) throw new IllegalArgumentException( "Element (" + k + ") out of range [" + ( bottom ? "-" : String.valueOf( from ) ) + ", " + ( top ? "-" : String.valueOf( to ) ) + ")" ); 
+			return AVL_TREE_SET.this.add( k );
+		}
+
+		@SuppressWarnings("unchecked")
+		public boolean remove( final KEY_TYPE k ) {
+			if ( ! in( KEY_GENERIC_CAST k ) ) return false;
+			return AVL_TREE_SET.this.remove( k );
+		}
+
+		public int size() {
+			final SubsetIterator i = new SubsetIterator();
+			int n = 0;
+				
+			while( i.hasNext() ) {
+				n++;
+				i.next();
+			}
+				
+			return n;
+		}
+
+
+		public boolean isEmpty() {
+			return ! new SubsetIterator().hasNext();
+		}
+		  
+		public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() {
+			return actualComparator;
+		}
+
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator() {
+			return new SubsetIterator();
+		}
+
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) {
+			return new SubsetIterator( from );
+		}
+
+		public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_TYPE to ) {
+			if ( top ) return new Subset( from, bottom, to, false );
+			return compare( to, this.to ) < 0 ? new Subset( from, bottom, to, false ) : this;
+		}
+		  
+		public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_TYPE from ) {
+			if ( bottom ) return new Subset( from, false, to, top );
+			return compare( from, this.from ) > 0 ? new Subset( from, false, to, top ) : this;
+		}
+		  
+		public SORTED_SET KEY_GENERIC subSet( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) {
+			if ( top && bottom ) return new Subset( from, false, to, false );
+			if ( ! top ) to = compare( to, this.to ) < 0 ? to : this.to;
+			if ( ! bottom ) from = compare( from, this.from ) > 0 ? from : this.from;
+			if ( ! top && ! bottom && from == this.from && to == this.to ) return this;
+			return new Subset( from, false, to, false );
+		}
+
+
+		/** Locates the first entry.
+		 *
+		 * @return the first entry of this subset, or <code>null</code> if the subset is empty.
+		 */
+		public AVL_TREE_SET.Entry KEY_GENERIC firstEntry() {
+			if ( tree == null ) return null;
+			// If this subset goes to -infinity, we return the main set's first entry; otherwise, we locate the start of the set.
+			AVL_TREE_SET.Entry KEY_GENERIC e;
+			if ( bottom ) e = firstEntry;
+			else {
+				e = locateKey( from );
+				// If we find either the start or something greater we're OK.
+				if ( compare( e.key, from ) < 0 ) e = e.next();
+			}
+			// Finally, if this subset doesn't go to infinity, we check that the resulting key isn't greater than the end.
+			if ( e == null || ! top && compare( e.key, to ) >= 0 ) return null;
+			return e;
+		}
+	 
+		/** Locates the last entry.
+		 *
+		 * @return the last entry of this subset, or <code>null</code> if the subset is empty.
+		 */
+		public AVL_TREE_SET.Entry KEY_GENERIC lastEntry() {
+			if ( tree == null ) return null;
+			// If this subset goes to infinity, we return the main set's last entry; otherwise, we locate the end of the set.
+			AVL_TREE_SET.Entry KEY_GENERIC e;
+			if ( top ) e = lastEntry;
+			else {
+				e = locateKey( to );
+				// If we find something smaller than the end we're OK.
+				if ( compare( e.key, to ) >= 0 ) e = e.prev();
+			}
+			// Finally, if this subset doesn't go to -infinity, we check that the resulting key isn't smaller than the start.
+			if ( e == null || ! bottom && compare( e.key, from ) < 0 ) return null;
+			return e;
+		}
+
+
+		public KEY_GENERIC_TYPE FIRST() {
+			AVL_TREE_SET.Entry KEY_GENERIC e = firstEntry();
+			if ( e == null ) throw new NoSuchElementException();
+			return e.key;
+		}
+
+		public KEY_GENERIC_TYPE LAST() {
+			AVL_TREE_SET.Entry KEY_GENERIC e = lastEntry();
+			if ( e == null ) throw new NoSuchElementException();
+			return e.key;
+		}
+	 
+		/** An iterator for subranges.
+		 * 
+		 * <P>This class inherits from {@link SetIterator}, but overrides the methods that
+		 * update the pointer after a {@link java.util.ListIterator#next()} or {@link java.util.ListIterator#previous()}. If we were to
+		 * move out of the range of the subset, we simply overwrite the next or previous
+		 * entry with <code>null</code>.
+		 */
+		private final class SubsetIterator extends SetIterator {
+			SubsetIterator() {
+				next = firstEntry();
+			}
+
+			SubsetIterator( final KEY_GENERIC_TYPE k ) {
+				this();
+
+				if ( next != null ) {
+					if ( ! bottom && compare( k, next.key ) < 0 ) prev = null;
+					else if ( ! top && compare( k, ( prev = lastEntry() ).key ) >= 0 ) next = null;
+					else {
+						next = locateKey( k );
+								
+						if ( compare( next.key, k ) <= 0 ) {
+							prev = next;
+							next = next.next();
+						}
+						else prev = next.prev();
+					}
+				}
+			}
+
+			void updatePrevious() {
+				prev = prev.prev();
+				if ( ! bottom && prev != null && AVL_TREE_SET.this.compare( prev.key, from ) < 0 ) prev = null;
+			}
+				
+			void updateNext() {
+				next = next.next();
+				if ( ! top && next != null && AVL_TREE_SET.this.compare( next.key, to ) >= 0 ) next = null;
+			}
+		}
+	}
+
+
+
+	/** Returns a deep copy of this tree set.
+	 *
+	 * <P>This method performs a deep copy of this tree set; the data stored in the
+	 * set, however, is not cloned. Note that this makes a difference only for object keys.
+	 *
+	 * @return a deep copy of this tree set.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public Object clone() {
+		AVL_TREE_SET KEY_GENERIC c;
+		try {
+			c = (AVL_TREE_SET KEY_GENERIC)super.clone();
+		}
+		catch(CloneNotSupportedException cantHappen) {
+			throw new InternalError();
+		}
+
+		c.allocatePaths();
+
+		if ( count != 0 ) {
+			// This apparently unfathomable code, too, is derived from GNU libavl.
+			Entry KEY_GENERIC e, p, q, rp = new Entry KEY_GENERIC(), rq = new Entry KEY_GENERIC();
+
+			p = rp;
+			rp.left( tree );
+
+			q = rq;
+			rq.pred( null );
+
+			while( true ) {
+				if ( ! p.pred() ) {
+					e = p.left.clone();
+					e.pred( q.left );
+					e.succ( q );
+					q.left( e );
+
+					p = p.left;
+					q = q.left;
+				}
+				else {
+					while( p.succ() ) {
+						p = p.right;
+
+						if ( p == null ) {
+							q.right = null;
+							c.tree = rq.left;
+
+							c.firstEntry = c.tree;
+							while( c.firstEntry.left != null ) c.firstEntry = c.firstEntry.left;
+							c.lastEntry = c.tree;
+							while( c.lastEntry.right != null ) c.lastEntry = c.lastEntry.right;
+
+							return c;
+						}
+						q = q.right;
+					}
+								
+					p = p.right;
+					q = q.right;
+				}
+
+				if ( ! p.succ() ) {
+					e = p.right.clone();
+					e.succ( q.right );
+					e.pred( q );
+					q.right( e );
+				}
+			}
+		}
+
+		return c;
+	}
+	 
+
+	private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
+		int n = count;
+		SetIterator i = new SetIterator();
+
+		s.defaultWriteObject();
+		while( n-- != 0 ) s.WRITE_KEY( i.NEXT_KEY() );
+	}
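+	/* Note (added): the serialized form thus consists essentially of the default fields
+	   (count and storedComparator; the tree structure is transient) followed by the keys
+	   in ascending order, which readTree() below turns directly into a balanced tree
+	   with no rebalancing work. */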
+
+
+	/** Reads the given number of entries from the input stream, returning the corresponding tree. 
+	 *
+	 * @param s the input stream.
+	 * @param n the (positive) number of entries to read.
+	 * @param pred the entry containing the key that precedes the first key in the tree.
+	 * @param succ the entry containing the key that follows the last key in the tree.
+	 */
+	@SuppressWarnings("unchecked")
+	private Entry KEY_GENERIC readTree( final java.io.ObjectInputStream s, final int n, final Entry KEY_GENERIC pred, final Entry KEY_GENERIC succ ) throws java.io.IOException, ClassNotFoundException {
+		if ( n == 1 ) {
+			final Entry KEY_GENERIC top = new Entry KEY_GENERIC( KEY_GENERIC_CAST s.READ_KEY() );
+			top.pred( pred );
+			top.succ( succ );
+
+			return top;
+		}
+
+		if ( n == 2 ) {
+			/* We handle this case separately so that recursion will
+			 *always* be on nonempty subtrees. */
+			final Entry KEY_GENERIC top = new Entry KEY_GENERIC( KEY_GENERIC_CAST s.READ_KEY() );
+			top.right( new Entry KEY_GENERIC( KEY_GENERIC_CAST s.READ_KEY() ) );
+			top.right.pred( top );
+			top.balance( 1 );
+			top.pred( pred );
+			top.right.succ( succ );
+			
+			return top;
+		}
+
+		// The right subtree is the largest one.
+		final int rightN = n / 2, leftN = n - rightN - 1;
+
+		final Entry KEY_GENERIC top = new Entry KEY_GENERIC();
+
+		top.left( readTree( s, leftN, pred, top ) );
+		
+		top.key = KEY_GENERIC_CAST s.READ_KEY();
+
+		top.right( readTree( s, rightN, top, succ ) );
+
+		if ( n == ( n & -n ) ) top.balance( 1 ); // Quick test for determining whether n is a power of 2.
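+		// (Added note: n & -n isolates the lowest set bit, so it equals n exactly when n is a
+		// power of two, e.g. 8 & -8 == 8 but 6 & -6 == 2; in that case the right subtree, of
+		// size n / 2, is one level taller than the left one, of size n / 2 - 1.)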
+
+		return top;
+	}
+
+
+	private void readObject( java.io.ObjectInputStream s ) throws java.io.IOException, ClassNotFoundException {
+		s.defaultReadObject();
+		/* The storedComparator is now correctly set, but we must restore
+		   the actualComparator on the fly. */
+		setActualComparator();
+		allocatePaths();
+
+		if ( count != 0 ) {
+			tree = readTree( s, count, null, null );
+			Entry KEY_GENERIC e;
+
+			e = tree;
+			while( e.left() != null ) e = e.left();
+			firstEntry = e;
+
+			e = tree;
+			while( e.right() != null ) e = e.right();
+			lastEntry = e;
+		}
+
+		if ( ASSERTS ) checkTree( tree );
+	}
+
+	@SuppressWarnings("rawtypes")
+#ifdef ASSERTS_CODE
+	private static KEY_GENERIC int checkTree( Entry KEY_GENERIC e ) {
+		if ( e == null ) return 0;
+
+		final int leftN = checkTree( e.left() ), rightN = checkTree( e.right() );
+		if ( leftN + e.balance() != rightN )
+			throw new AssertionError( "Mismatch between left subtree height (" + leftN + "), right subtree height (" + rightN + ") and balance (" + e.balance() + ")" );
+
+		return Math.max( leftN , rightN ) + 1;
+	}
+#else
+	private static KEY_GENERIC int checkTree( @SuppressWarnings("unused") Entry KEY_GENERIC e ) { return 0; }
+#endif
+
+#ifdef TEST
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#else
+		return Integer.toBinaryString( r.nextInt() );
+#endif
+	}
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition p = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, p ).toString();
+	}
+
+	private static void speedTest( int n, boolean comp ) {
+		int i, j;
+		AVL_TREE_SET m;
+		java.util.TreeSet t;
+		KEY_TYPE k[] = new KEY_TYPE[n];
+		KEY_TYPE nk[] = new KEY_TYPE[n];
+		long ms;
+
+		for( i = 0; i < n; i++ ) {
+			k[i] = genKey();
+			nk[i] = genKey();
+		}
+		  
+		double totAdd = 0, totYes = 0, totNo = 0, totIterFor = 0, totIterBack = 0, totRemYes = 0, d, dd;
+
+		if ( comp ) {
+			for( j = 0; j < 20; j++ ) {
+
+				t = new java.util.TreeSet();
+
+				/* We first add all elements to t. */
+				for( i = 0; i < n;  i++ ) t.add( KEY2OBJ( k[i] ) );
+
+				/* Then we remove the first half and put it back. */
+				for( i = 0; i < n/2;  i++ ) t.remove( KEY2OBJ( k[i] ) );
+
+				ms = System.currentTimeMillis();
+				for( i = 0; i < n/2;  i++ ) t.add( KEY2OBJ( k[i] ) );
+				d = System.currentTimeMillis() - ms;
+
+				/* Then we remove the other half and put it back again. */
+				ms = System.currentTimeMillis();
+				for( i = n/2; i < n;  i++ ) t.remove( KEY2OBJ( k[i] ) );
+				dd = System.currentTimeMillis() - ms ;
+
+				ms = System.currentTimeMillis();
+				for( i = n/2; i < n;  i++ ) t.add( KEY2OBJ( k[i] ) );
+				d += System.currentTimeMillis() - ms;
+				if ( j > 2 ) totAdd += n/d; 				
+				System.out.print("Add: " + format( n/d ) +" K/s " );
+
+				/* Then we remove again the first half. */
+				ms = System.currentTimeMillis();
+				for( i = 0; i < n/2;  i++ ) t.remove( KEY2OBJ( k[i] ) );
+				dd += System.currentTimeMillis() - ms ;
+				if ( j > 2 ) totRemYes += n/dd; 				
+				System.out.print("RemYes: " + format( n/dd ) +" K/s " );
+
+				/* And then we put it back. */
+				for( i = 0; i < n/2;  i++ ) t.add( KEY2OBJ( k[i] ) );
+
+				/* We check for elements in t. */
+				ms = System.currentTimeMillis();
+				for( i = 0; i < n;  i++ ) t.contains( KEY2OBJ( k[i] ) );
+				d = 1.0 * n / (System.currentTimeMillis() - ms );
+				if ( j > 2 ) totYes += d; 				
+				System.out.print("Yes: " + format( d ) +" K/s " );
+
+				/* We check for elements not in t. */
+				ms = System.currentTimeMillis();
+				for( i = 0; i < n;  i++ ) t.contains( KEY2OBJ( nk[i] ) );
+				d = 1.0 * n / (System.currentTimeMillis() - ms );
+				if ( j > 2 ) totNo += d; 				
+				System.out.print("No: " + format( d ) +" K/s " );
+
+				/* We iterate on t. */
+				ms = System.currentTimeMillis();
+				for( Iterator it = t.iterator(); it.hasNext(); it.next() );
+				d = 1.0 * n / (System.currentTimeMillis() - ms );
+				if ( j > 2 ) totIterFor += d; 				
+				System.out.print("IterFor: " + format( d ) +" K/s " );
+				
+				System.out.println();
+			}
+
+			System.out.println();
+			System.out.println( "java.util Add: " + format( totAdd/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s IterFor: " + format( totIterFor/(j-3) )  + " K/s"  );
+
+			System.out.println();
+
+			totAdd = totYes = totNo = totIterFor = totIterBack = totRemYes = 0;
+
+		}
+
+		for( j = 0; j < 20; j++ ) {
+
+			m = new AVL_TREE_SET();
+
+			/* We first add all elements to m. */
+			for( i = 0; i < n;  i++ ) m.add( k[i] );
+
+			/* Then we remove the first half and put it back. */
+			for( i = 0; i < n/2;  i++ ) m.remove( k[i] );
+
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n/2;  i++ ) m.add( k[i] );
+			d = System.currentTimeMillis() - ms;
+
+			/* Then we remove the other half and put it back again. */
+			ms = System.currentTimeMillis();
+			for( i = n/2; i < n;  i++ ) m.remove( k[i] );
+			dd = System.currentTimeMillis() - ms ;
+
+			ms = System.currentTimeMillis();
+			for( i = n/2; i < n;  i++ ) m.add( k[i] );
+			d += System.currentTimeMillis() - ms;
+			if ( j > 2 ) totAdd += n/d; 				
+			System.out.print("Add: " + format( n/d ) +" K/s " );
+
+			/* Then we remove again the first half. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n/2;  i++ ) m.remove( k[i] );
+			dd += System.currentTimeMillis() - ms ;
+			if ( j > 2 ) totRemYes += n/dd; 				
+			System.out.print("RemYes: " + format( n/dd ) +" K/s " );
+
+			/* And then we put it back. */
+			for( i = 0; i < n/2;  i++ ) m.add( k[i] );
+
+			/* We check for elements in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.contains( k[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totYes += d; 				
+			System.out.print("Yes: " + format( d ) +" K/s " );
+
+			/* We check for elements not in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.contains( nk[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totNo += d; 				
+			System.out.print("No: " + format( d ) +" K/s " );
+
+			/* We iterate on m. */
+			KEY_LIST_ITERATOR it = (KEY_LIST_ITERATOR)m.iterator();
+			ms = System.currentTimeMillis();
+			for( ; it.hasNext(); it.NEXT_KEY() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIterFor += d; 				
+			System.out.print("IterFor: " + format( d ) +" K/s " );
+				
+			/* We iterate back on m. */
+			ms = System.currentTimeMillis();
+			for( ; it.hasPrevious(); it.PREV_KEY() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIterBack += d; 				
+			System.out.print("IterBack: " + format( d ) +" K/s " );
+
+			System.out.println();
+		}
+
+
+		System.out.println();
+		System.out.println( "fastutil  Add: " + format( totAdd/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s IterFor: " + format( totIterFor/(j-3) )  + " K/s IterBack: " + format( totIterBack/(j-3) ) + "K/s"  );
+
+		System.out.println();
+	}
+
+
+	private static boolean valEquals(Object o1, Object o2) {
+		return o1 == null ? o2 == null : o1.equals(o2);
+	}
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	private static Object[] k, v, nk;
+	private static KEY_TYPE kt[];
+	private static KEY_TYPE nkt[];
+	private static AVL_TREE_SET topSet;
+
+	protected static void testSets( SORTED_SET m, SortedSet t, int n, int level ) {
+		long ms;
+		boolean mThrowsIllegal, tThrowsIllegal, mThrowsNoElement, tThrowsNoElement;
+		boolean rt = false, rm = false;
+
+		if ( level > 4 ) return;
+				
+
+		/* Now we check that both sets agree on first/last keys. */
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.first();
+		}
+		catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.first();
+		}
+		catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): first() divergence at start in NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+		if ( ! mThrowsNoElement ) ensure( t.first().equals( m.first() ), "Error (" + level + ", " + seed + "): m and t differ at start on their first key (" + m.first() + ", " + t.first() +")" );
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.last();
+		}
+		catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.last();
+		}
+		catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): last() divergence at start in NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+
+
+		if ( ! mThrowsNoElement ) ensure( t.last().equals( m.last() ), "Error (" + level + ", " + seed + "): m and t differ at start on their last key (" + m.last() + ", " + t.last() +")");
+
+
+		/* Now we check that m and t are equal. */
+		if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t);
+
+		ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" );
+		ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" );
+
+
+
+		/* Now we check that m actually holds that data. */
+		for(Iterator i=t.iterator(); i.hasNext();  ) {
+			ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on t)" );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for(Iterator i=m.iterator(); i.hasNext();  ) {
+			ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on m)" );
+		}
+
+		/* Now we check that inquiries about random data give the same answer in m and t. For
+		   m we use the polymorphic method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+				
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+
+			try {
+				m.contains(T);
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+
+			try {
+				t.contains(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): contains() divergence in NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): contains() divergence in IllegalArgumentException (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			if ( !mThrowsNoElement && !mThrowsIllegal ) ensure( m.contains(KEY2OBJ(T)) == t.contains(KEY2OBJ(T)), "Error (" + level + ", " + seed + "): divergence in keys between t and m (polymorphic method)" );
+		}
+
+		/* Again, we check that inquiries about random data give the same answer in m and t, but
+		   for m we use the standard method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+
+			try {
+				m.contains(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+
+			try {
+				t.contains(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): contains() divergence in NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): contains() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			if ( !mThrowsNoElement && !mThrowsIllegal ) ensure( m.contains(KEY2OBJ(T)) ==  t.contains(KEY2OBJ(T)), "Error (" + level + ", " + seed + "): divergence between t and m (standard method)" );
+		}
+
+		/* Now we add and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+
+			try {
+				rm = m.add(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+
+			try {
+				rt = t.add(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): add() divergence in NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): add() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			if ( !mThrowsNoElement && !mThrowsIllegal ) ensure( rm == rt, "Error (" + level + ", " + seed + "): divergence in add() between t and m" );
+
+			T = genKey();
+
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+
+			try {
+				rm = m.remove(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+
+			try {
+				rt = t.remove(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): remove() divergence in NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): remove() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			if ( !mThrowsNoElement && !mThrowsIllegal ) ensure( rm == rt, "Error (" + level + ", " + seed + "): divergence in remove() between t and m" );
+		}
+
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after removal" );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after removal" );
+
+		/* Now we check that m actually holds the same data. */
+		  
+		for(Iterator i=t.iterator(); i.hasNext();  ) {
+			ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after removal (iterating on t)");
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(Iterator i=m.iterator(); i.hasNext();  ) {
+			ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after removal (iterating on m)" );
+		}
+
+		/* Now we check that both sets agree on first/last keys. */
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.first();
+		}
+		catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.first();
+		}
+		catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): first() divergence in NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+		if ( ! mThrowsNoElement ) ensure( t.first().equals( m.first() ), "Error (" + level + ", " + seed + "): m and t differ on their first key (" + m.first() + ", " + t.first() +")" );
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.last();
+		}
+		catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.last();
+		}
+		catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): last() divergence in NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+
+		if ( ! mThrowsNoElement ) ensure( t.last().equals( m.last() ), "Error (" + level + ", " + seed + "): m and t differ on their last key (" + m.last() + ", " + t.last() +")");
+
+		/* Now we check cloning. */
+
+		if ( level == 0 ) {
+			ensure( m.equals( ((AVL_TREE_SET)m).clone() ), "Error (" + level + ", " + seed + "): m does not equal m.clone()" );
+			ensure( ((AVL_TREE_SET)m).clone().equals( m ), "Error (" + level + ", " + seed + "): m.clone() does not equal m" );
+		}
+
+		int h = m.hashCode();
+
+
+		/* Now we save and read m. */
+
+		SORTED_SET m2 = null;
+		  
+		try {
+			java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test");
+			java.io.OutputStream os = new java.io.FileOutputStream(ff);
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os);
+				
+			oos.writeObject(m);
+			oos.close();
+				
+			java.io.InputStream is = new java.io.FileInputStream(ff);
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is);
+				
+			m2 = (SORTED_SET)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch(Exception e) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+
+		ensure( m2.hashCode() == h, "Error (" + level + ", " + seed + "): hashCode() changed after save/read" );
+		  
+		/* Now we check that m2 actually holds that data. */
+		  
+		ensure( m2.equals(t), "Error (" + level + ", " + seed + "): ! m2.equals( t ) after save/read" );
+		ensure( t.equals(m2), "Error (" + level + ", " + seed + "): ! t.equals( m2 ) after save/read" );
+		/* Now we take out of m everything, and check that it is empty. */
+
+		for(Iterator i=t.iterator(); i.hasNext(); ) m2.remove(i.next());
+
+		ensure( m2.isEmpty(), "Error (" + level + ", " + seed + "): m2 is not empty (as it should be)" );
+				 
+		/* Now we play with iterators. */
+
+		{
+			java.util.ListIterator i, j;
+			Object J;
+			i = (java.util.ListIterator)m.iterator(); 
+			j = new java.util.LinkedList( t ).listIterator(); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext()" );
+				ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + level + ", " + seed + "): divergence in hasPrevious()" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					ensure( i.next().equals( J = j.next() ), "Error (" + level + ", " + seed + "): divergence in next()" );
+
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					ensure( i.previous().equals( J = j.previous() ), "Error (" + level + ", " + seed + "): divergence in previous()" );
+
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+
+				ensure( i.nextIndex() == j.nextIndex(), "Error (" + level + ", " + seed + "): divergence in nextIndex()" );
+				ensure( i.previousIndex() == j.previousIndex(), "Error (" + level + ", " + seed + "): divergence in previousIndex()" );
+
+			}
+
+		}
+
+		{
+			boolean badPrevious = false;
+			Object previous = null;
+			it.unimi.dsi.fastutil.BidirectionalIterator i;
+			java.util.ListIterator j;
+			Object I, J;
+			KEY_TYPE from = genKey();
+			j = new java.util.LinkedList( t ).listIterator(); 
+			while( j.hasNext() ) {
+				Object k = j.next();
+				if ( ((Comparable)k).compareTo( KEY2OBJ( from ) ) > 0 ) {
+					badPrevious = true;
+					j.previous();
+					break;
+				}
+				previous = k;
+			}
+
+			i = (it.unimi.dsi.fastutil.BidirectionalIterator)m.iterator( from ); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" );
+				ensure( i.hasPrevious() == j.hasPrevious() || badPrevious && ( i.hasPrevious() == ( previous != null ) ), "Error (" + level + ", " + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					ensure( ( I = i.next() ).equals( J = j.next() ), "Error (" + level + ", " + seed + "): divergence in next() (" + I + ", " + J + ", iterator with starting point " + from + ")" );
+					//System.err.println("Done next " + I + " " + J + "  " + badPrevious);
+
+					badPrevious = false;
+
+					if ( r.nextFloat() < 0.5 ) {
+						//System.err.println("Removing in next");
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+				else if ( !badPrevious && r.nextFloat() < .2 && i.hasPrevious() ) {
+					ensure( ( I = i.previous() ).equals( J = j.previous() ), "Error (" + level + ", " + seed + "): divergence in previous() (" + I + ", " + J + ", iterator with starting point " + from + ")" );
+
+					if ( r.nextFloat() < 0.5 ) {
+						//System.err.println("Removing in prev");
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+			}
+
+		}
+
+		/* Now we check that m actually holds that data. */
+		  
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after iteration" );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after iteration" );
+
+		/* Now we select a pair of keys and create a subset. */
+
+		if ( ! m.isEmpty() ) {
+			java.util.ListIterator i;
+			Object start = m.first(), end = m.first();
+			for( i = (java.util.ListIterator)m.iterator(); i.hasNext() && r.nextFloat() < .3; start = end = i.next() );
+			for( ; i.hasNext() && r.nextFloat() < .95; end = i.next() );
+
+			//System.err.println("Checking subSet from " + start + " to " + end + " (level=" + (level+1) + ")..." );
+			testSets( (SORTED_SET)m.subSet( (KEY_CLASS)start, (KEY_CLASS)end ), t.subSet( start, end ), n, level + 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after subSet" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after subSet" );
+
+			//System.err.println("Checking headSet to " + end + " (level=" + (level+1) + ")..." );
+			testSets( (SORTED_SET)m.headSet( (KEY_CLASS)end ), t.headSet( end ), n, level + 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after headSet" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after headSet" );
+
+			//System.err.println("Checking tailSet from " + start + " (level=" + (level+1) + ")..." );
+			testSets( (SORTED_SET)m.tailSet( (KEY_CLASS)start ), t.tailSet( start ), n, level + 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after tailSet" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after tailSet" );
+		}
+		  
+
+	}
+
+
+	private static void test( int n ) {
+		AVL_TREE_SET m = new AVL_TREE_SET();
+		SortedSet t = new java.util.TreeSet();
+		topSet = m;
+		k = new Object[n];
+		nk = new Object[n];
+		kt = new KEY_TYPE[n];
+		nkt = new KEY_TYPE[n];
+
+		for( int i = 0; i < n; i++ ) {
+#if #keyclass(Object)
+			k[i] = kt[i] = genKey();
+			nk[i] = nkt[i] = genKey();
+#else
+			k[i] = new KEY_CLASS( kt[i] = genKey() );
+			nk[i] = new KEY_CLASS( nkt[i] = genKey() );
+#endif
+		}
+		  
+		/* We add the elements of k to t. */
+		for( int i = 0; i < n;  i++ ) t.add( k[i] );
+		  
+		/* We add the same data to m. */
+		m.addAll(t);
+
+		testSets( m, t, n, 0 );
+
+		System.out.println("Test OK");
+		return;
+	}
+
+
+	public static void main( String args[] ) {
+		int n  = Integer.parseInt(args[1]);
+		if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) );
+		  
+
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+
+#endif
+
+
+}
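
For reference, a minimal sketch of the sorted-set operations the test harness above exercises, assuming the int specialization generated from this driver (IntAVLTreeSet and IntSortedSet in it.unimi.dsi.fastutil.ints); the positioning contract of iterator( from ) is the one checked by the test code above.

    import it.unimi.dsi.fastutil.ints.IntAVLTreeSet;
    import it.unimi.dsi.fastutil.ints.IntBidirectionalIterator;
    import it.unimi.dsi.fastutil.ints.IntSortedSet;

    public class AVLTreeSetSketch {
        public static void main( final String[] args ) {
            final IntAVLTreeSet s = new IntAVLTreeSet();
            for( int i = 0; i < 10; i++ ) s.add( i * i ); // 0, 1, 4, 9, 16, 25, 36, 49, 64, 81

            // The views are backed by the original set, which is what the recursive
            // testSets() calls above rely on when they modify a subSet().
            final IntSortedSet head = s.headSet( 25 );   // { 0, 1, 4, 9, 16 }
            final IntSortedSet tail = s.tailSet( 25 );   // { 25, 36, 49, 64, 81 }
            final IntSortedSet sub = s.subSet( 4, 49 );  // { 4, 9, 16, 25, 36 }

            // iterator( from ): next() returns the smallest element greater than from,
            // previous() the largest element not greater than from (cf. the checks above).
            final IntBidirectionalIterator i = s.iterator( 20 );
            final int next = i.nextInt();     // 25
            i.previousInt();                  // steps back over 25
            final int prev = i.previousInt(); // 16
            System.out.println( prev + " <= 20 < " + next );
            System.out.println( head + " " + tail + " " + sub );
        }
    }
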
diff --git a/drv/AbstractBidirectionalIterator.drv b/drv/AbstractBidirectionalIterator.drv
new file mode 100644
index 0000000..1e682cb
--- /dev/null
+++ b/drv/AbstractBidirectionalIterator.drv
@@ -0,0 +1,54 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+/**  An abstract class facilitating the creation of type-specific {@linkplain it.unimi.dsi.fastutil.BidirectionalIterator bidirectional iterators}.
+ *
+ * <P>To create a type-specific bidirectional iterator, besides what is needed
+ * for an iterator you need both a method returning the previous element as a
+ * primitive type and a method returning the previous element as an
+ * object. However, if you inherit from this class you need to provide just one of the two.
+ *
+ * <P>This class also implements a trivial version of {@link #back(int)} that
+ * uses type-specific methods.
+ */
+
+public abstract class KEY_ABSTRACT_BIDI_ITERATOR KEY_GENERIC extends KEY_ABSTRACT_ITERATOR KEY_GENERIC implements KEY_BIDI_ITERATOR KEY_GENERIC {
+
+	protected KEY_ABSTRACT_BIDI_ITERATOR() {}
+
+#if #keys(primitive)
+
+	/** Delegates to the corresponding generic method. */
+	public KEY_TYPE PREV_KEY() { return previous().KEY_VALUE(); }
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_GENERIC_CLASS previous() { return KEY_CLASS.valueOf( PREV_KEY() ); }
+
+#endif
+
+	/** This method just iterates the type-specific version of {@link #previous()} for
+	 * at most <code>n</code> times, stopping if {@link
+	 * #hasPrevious()} becomes false. */
+	public int back( final int n ) { 
+		int i = n;
+		while( i-- != 0 && hasPrevious() ) PREV_KEY(); 
+		return n - i - 1;
+	}
+
+}
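
A minimal sketch of a concrete subclass, assuming the int specialization of this driver (it.unimi.dsi.fastutil.ints.AbstractIntBidirectionalIterator): only the primitive nextInt()/previousInt() and the two has*() methods are written, while the boxed next()/previous() and skip()/back() are inherited from the abstract classes.

    import it.unimi.dsi.fastutil.ints.AbstractIntBidirectionalIterator;
    import java.util.NoSuchElementException;

    /** Iterates bidirectionally over the integers in [from..to). */
    public class RangeIterator extends AbstractIntBidirectionalIterator {
        private final int from, to;
        private int pos;

        public RangeIterator( final int from, final int to ) {
            this.from = from;
            this.to = to;
            this.pos = from;
        }

        public boolean hasNext() { return pos < to; }
        public boolean hasPrevious() { return pos > from; }

        public int nextInt() {
            if ( ! hasNext() ) throw new NoSuchElementException();
            return pos++;
        }

        public int previousInt() {
            if ( ! hasPrevious() ) throw new NoSuchElementException();
            return --pos;
        }
    }
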
diff --git a/drv/AbstractBigList.drv b/drv/AbstractBigList.drv
new file mode 100644
index 0000000..d30c2e0
--- /dev/null
+++ b/drv/AbstractBigList.drv
@@ -0,0 +1,651 @@
+/*		 
+ * Copyright (C) 2010-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+#if #keys(reference)
+import it.unimi.dsi.fastutil.Stack;
+#endif
+
+import java.util.Iterator;
+import java.util.Collection;
+import java.util.NoSuchElementException;
+
+import it.unimi.dsi.fastutil.BigList;
+import it.unimi.dsi.fastutil.BigListIterator;
+
+/**  An abstract class providing basic methods for big lists implementing a type-specific big list interface. */
+
+public abstract class ABSTRACT_BIG_LIST KEY_GENERIC extends ABSTRACT_COLLECTION KEY_GENERIC implements BIG_LIST KEY_GENERIC, STACK KEY_GENERIC {
+
+	protected ABSTRACT_BIG_LIST() {}
+	
+	/** Ensures that the given index is nonnegative and not greater than this big-list size.
+	 *
+	 * @param index an index.
+	 * @throws IndexOutOfBoundsException if the given index is negative or greater than this big-list size.
+	 */
+	protected void ensureIndex( final long index ) {
+		if ( index < 0 )  throw new IndexOutOfBoundsException( "Index (" + index + ") is negative" );
+		if ( index > size64() ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than list size (" + ( size64() ) + ")" );
+	}
+	
+	/** Ensures that the given index is nonnegative and smaller than this big-list size.
+	 *
+	 * @param index an index.
+	 * @throws IndexOutOfBoundsException if the given index is negative or not smaller than this big-list size.
+	 */
+	protected void ensureRestrictedIndex( final long index ) {
+		if ( index < 0 )  throw new IndexOutOfBoundsException( "Index (" + index + ") is negative" );
+		if ( index >= size64() ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than or equal to list size (" + ( size64() ) + ")" );
+	}
+
+	public void add( final long index, final KEY_GENERIC_TYPE k ) {
+		throw new UnsupportedOperationException();
+	}
+
+	public boolean add( final KEY_GENERIC_TYPE k ) {
+		add( size64(), k );
+		return true;
+	}
+
+	public KEY_GENERIC_TYPE REMOVE_KEY( long i ) {
+		throw new UnsupportedOperationException();
+	}
+
+	public KEY_GENERIC_TYPE REMOVE_KEY( int i ) {
+		return REMOVE_KEY( (long)i );
+	}
+
+	public KEY_GENERIC_TYPE set( final long index, final KEY_GENERIC_TYPE k ) {
+		throw new UnsupportedOperationException();
+	}
+
+	public KEY_GENERIC_TYPE set( final int index, final KEY_GENERIC_TYPE k ) {
+		return set( (long)index, k );
+	}
+
+	public boolean addAll( long index, final Collection<? extends KEY_GENERIC_CLASS> c ) {
+		ensureIndex( index );
+		int n = c.size();
+		if ( n == 0 ) return false;
+		Iterator<? extends KEY_GENERIC_CLASS> i = c.iterator();
+		while( n-- != 0 ) add( index++, i.next() );
+		return true;
+	}
+
+	public boolean addAll( int index, final Collection<? extends KEY_GENERIC_CLASS> c ) {
+		return addAll( (long)index, c );
+	}
+	
+	/** Delegates to a more generic method. */
+	public boolean addAll( final Collection<? extends KEY_GENERIC_CLASS> c ) {
+		return addAll( size64(), c );
+	}
+
+	public KEY_BIG_LIST_ITERATOR KEY_GENERIC iterator() {
+		return listIterator();
+	}
+
+	public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator() {
+		return listIterator( 0L );
+	}
+
+	public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( final long index ) {
+		return new KEY_ABSTRACT_BIG_LIST_ITERATOR KEY_GENERIC() {
+				long pos = index, last = -1;
+							
+				public boolean hasNext() { return pos < ABSTRACT_BIG_LIST.this.size64(); }
+				public boolean hasPrevious() { return pos > 0; }
+				public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); return ABSTRACT_BIG_LIST.this.GET_KEY( last = pos++ ); }
+				public KEY_GENERIC_TYPE PREV_KEY() { if ( ! hasPrevious() ) throw new NoSuchElementException(); return ABSTRACT_BIG_LIST.this.GET_KEY( last = --pos ); }
+				public long nextIndex() { return pos; }
+				public long previousIndex() { return pos - 1; }
+				public void add( KEY_GENERIC_TYPE k ) { 
+					if ( last == -1 ) throw new IllegalStateException();
+					ABSTRACT_BIG_LIST.this.add( pos++, k ); 
+					last = -1;
+				}
+				public void set( KEY_GENERIC_TYPE k ) { 
+					if ( last == -1 ) throw new IllegalStateException();
+					ABSTRACT_BIG_LIST.this.set( last, k ); 
+				}
+				public void remove() { 
+					if ( last == -1 ) throw new IllegalStateException();
+					ABSTRACT_BIG_LIST.this.REMOVE_KEY( last );
+					/* If the last operation was a next(), we are removing an element *before* us, and we must decrease pos correspondingly. */
+					if ( last < pos ) pos--;
+					last = -1;
+				}
+			};
+	}
+
+
+	public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( final int index ) {
+		return listIterator( (long)index );
+	}
+
+
+	public boolean contains( final KEY_TYPE k ) {
+		return indexOf( k ) >= 0;
+	}
+
+	public long indexOf( final KEY_TYPE k ) {
+		final KEY_BIG_LIST_ITERATOR KEY_GENERIC i = listIterator();
+		KEY_GENERIC_TYPE e;
+		while( i.hasNext() ) {
+			e = i.NEXT_KEY();
+			if ( KEY_EQUALS( k, e ) ) return i.previousIndex(); 
+		}
+		return -1;
+	}
+
+	public long lastIndexOf( final KEY_TYPE k ) {
+		KEY_BIG_LIST_ITERATOR KEY_GENERIC i = listIterator( size64() );
+		KEY_GENERIC_TYPE e;
+		while( i.hasPrevious() ) {
+			e = i.PREV_KEY();
+			if ( KEY_EQUALS( k, e ) ) return i.nextIndex(); 
+		}
+		return -1;
+	}
+
+	public void size( final long size ) {
+		long i = size64();
+		if ( size > i ) while( i++ < size ) add( KEY_NULL );
+		else while( i-- != size ) remove( i );
+	}
+	
+	public void size( final int size ) {
+		size( (long)size );
+	}
+
+	public BIG_LIST KEY_GENERIC subList( final long from, final long to ) {
+		ensureIndex( from );
+		ensureIndex( to );
+		if ( from > to ) throw new IndexOutOfBoundsException( "Start index (" + from + ") is greater than end index (" + to + ")" );
+		
+		return new SUBLIST KEY_GENERIC( this, from, to );
+	}
+	
+	/** Removes elements of this type-specific big list one-by-one. 
+	 *
+	 * <P>This is a trivial iterator-based implementation. It is expected that
+	 * implementations will override this method with a more optimized version.
+	 *
+	 * @param from the start index (inclusive).
+	 * @param to the end index (exclusive).
+	 */
+	
+	public void removeElements( final long from, final long to ) {
+		ensureIndex( to );
+		KEY_BIG_LIST_ITERATOR KEY_GENERIC i = listIterator( from );
+		long n = to - from;
+		if ( n < 0 ) throw new IllegalArgumentException( "Start index (" + from + ") is greater than end index (" + to + ")" );
+		while( n-- != 0 ) {
+			i.NEXT_KEY();
+			i.remove();
+		}
+	}
+
+	/** Adds elements to this type-specific big list one-by-one. 
+	 *
+	 * <P>This is a trivial iterator-based implementation. It is expected that
+	 * implementations will override this method with a more optimized version.
+	 *
+	 * @param index the index at which to add elements.
+	 * @param a the big array containing the elements.
+	 * @param offset the offset of the first element to add.
+	 * @param length the number of elements to add.
+	 */
+	
+	public void addElements( long index, final KEY_GENERIC_TYPE a[][], long offset, long length ) {
+		ensureIndex( index );
+		BIG_ARRAYS.ensureOffsetLength( a, offset, length );
+		while( length-- != 0 ) add( index++, BIG_ARRAYS.get( a, offset++ ) );
+	}
+
+	public void addElements( final long index, final KEY_GENERIC_TYPE a[][] ) {
+		addElements( index, a, 0, BIG_ARRAYS.length( a ) );
+	}
+
+	/** Copies element of this type-specific big list into the given big array one-by-one.
+	 *
+	 * <P>This is a trivial iterator-based implementation. It is expected that
+	 * implementations will override this method with a more optimized version.
+	 *
+	 * @param from the start index (inclusive).
+	 * @param a the destination big array.
+	 * @param offset the offset into the destination big array where to store the first element copied.
+	 * @param length the number of elements to be copied.
+	 */
+	 
+	public void getElements( final long from, final KEY_TYPE a[][], long offset, long length ) {
+		KEY_BIG_LIST_ITERATOR KEY_GENERIC i = listIterator( from );
+		BIG_ARRAYS.ensureOffsetLength( a, offset, length );
+		if ( from + length > size64() ) throw new IndexOutOfBoundsException( "End index (" + ( from + length ) + ") is greater than list size (" + size64() + ")" );
+		while( length-- != 0 ) BIG_ARRAYS.set( a, offset++, i.NEXT_KEY() );
+	}
+
+	@Deprecated
+	public int size() {
+		return (int)Math.min( Integer.MAX_VALUE, size64() );
+	}
+
+#if ! #keyclass(Reference)
+	private boolean valEquals( final Object a, final Object b ) {
+		return a == null ? b == null : a.equals( b );
+	}
+#endif
+
+	@SuppressWarnings("unchecked")
+	public boolean equals( final Object o ) {
+		if ( o == this ) return true;
+		if ( ! ( o instanceof BigList ) ) return false;
+		final BigList<?> l = (BigList<?>)o;
+		long s = size64();
+		if ( s != l.size64() ) return false;
+
+		final BigListIterator<?> i1 = listIterator(), i2 = l.listIterator();
+
+#if #keyclass(Reference)
+		while( s-- !=  0 ) if ( i1.next() != i2.next() ) return false;
+#else
+		while( s-- !=  0 ) if ( ! valEquals( i1.next(), i2.next() ) ) return false;
+#endif
+		return true;
+	}
+
+#if ! #keyclass(Reference)
+    /** Compares this big list to another object. If the
+     * argument is a {@link BigList}, this method performs a lexicographical comparison; otherwise,
+     * it throws a <code>ClassCastException</code>.
+     *
+     * @param l a big list.
+     * @return if the argument is a {@link BigList}, a negative integer,
+     * zero, or a positive integer as this list is lexicographically less than, equal
+     * to, or greater than the argument.
+     * @throws ClassCastException if the argument is not a big list.
+     */
+
+	@SuppressWarnings("unchecked")
+	public int compareTo( final BigList<? extends KEY_GENERIC_CLASS> l ) {
+		if ( l == this ) return 0;
+
+		if ( l instanceof BIG_LIST ) {
+			
+			final KEY_BIG_LIST_ITERATOR KEY_GENERIC i1 = listIterator(), i2 = ((BIG_LIST KEY_GENERIC)l).listIterator();
+			int r;
+			KEY_GENERIC_TYPE e1, e2;
+			
+			while( i1.hasNext() && i2.hasNext() ) {
+				e1 = i1.NEXT_KEY();
+				e2 = i2.NEXT_KEY();
+				if ( ( r = KEY_CMP( e1, e2 ) ) != 0 ) return r;
+			}
+			return i2.hasNext() ? -1 : ( i1.hasNext() ? 1 : 0 );
+		}
+		
+		BigListIterator<? extends KEY_GENERIC_CLASS> i1 = listIterator(), i2 = l.listIterator();
+		int r;
+
+		while( i1.hasNext() && i2.hasNext() ) {
+			if ( ( r = ((Comparable<? super KEY_GENERIC_CLASS>)i1.next()).compareTo( i2.next() ) ) != 0 ) return r;
+		}
+		return i2.hasNext() ? -1 : ( i1.hasNext() ? 1 : 0 );
+	}
+#endif
+
+	/** Returns the hash code for this big list, which is identical to {@link java.util.List#hashCode()}.
+	 *
+	 * @return the hash code for this big list.
+	 */
+	public int hashCode() {
+		KEY_ITERATOR KEY_GENERIC i = iterator();
+		int h = 1;
+		long s = size64();
+		while ( s-- != 0 ) {
+			KEY_GENERIC_TYPE k = i.NEXT_KEY(); 
+			h = 31 * h + KEY2JAVAHASH( k );
+		}
+		return h;
+	}
+
+	public void push( KEY_GENERIC_TYPE o ) {
+		add( o ); 
+	}
+
+	public KEY_GENERIC_TYPE POP() {
+		if ( isEmpty() ) throw new NoSuchElementException();
+		return REMOVE_KEY( size64() - 1 );
+	}
+
+	public KEY_GENERIC_TYPE TOP() {
+		if ( isEmpty() ) throw new NoSuchElementException();
+		return GET_KEY( size64() - 1 );
+	}
+
+	public KEY_GENERIC_TYPE PEEK( int i ) {
+		return GET_KEY( size64() - 1 - i );
+	}
+
+#if #keys(primitive)
+
+	public KEY_TYPE GET_KEY( final int index ) {
+		return GET_KEY( (long)index );
+	}
+
+	public boolean rem( KEY_TYPE k ) {
+		long index = indexOf( k );
+		if ( index == -1 ) return false;
+		REMOVE_KEY( index );
+		return true;
+	}
+
+	/** Delegates to a more generic method. */
+	public boolean addAll( final long index, final COLLECTION c ) {
+		return addAll( index, (Collection<? extends KEY_CLASS>)c );
+	}
+
+	/** Delegates to a more generic method. */
+	public boolean addAll( final long index, final BIG_LIST l ) {
+		return addAll( index, (COLLECTION)l );
+	}
+
+	public boolean addAll( final COLLECTION c ) {
+		return addAll( size64(), c );
+	}
+
+	public boolean addAll( final BIG_LIST l ) {
+		return addAll( size64(), l );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public void add( final long index, final KEY_CLASS ok ) {
+		add( index, ok.KEY_VALUE() );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_CLASS set( final long index, final KEY_CLASS ok ) {
+		return KEY2OBJ( set( index, ok.KEY_VALUE() ) );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_CLASS get( final long index ) {
+		return KEY2OBJ( GET_KEY( index ) );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public long indexOf( final Object ok ) {
+		return indexOf( KEY_OBJ2TYPE( ok ) );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public long lastIndexOf( final Object ok ) {
+		return lastIndexOf( KEY_OBJ2TYPE( ok ) );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_CLASS remove( final int index ) {
+		return KEY2OBJ( REMOVE_KEY( index ) );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_CLASS remove( final long index ) {
+		return KEY2OBJ( REMOVE_KEY( index ) );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public void push( KEY_CLASS o ) {
+		push( o.KEY_VALUE() ); 
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_CLASS pop() {
+		return KEY_CLASS.valueOf( POP() ); 
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_CLASS top() {
+		return KEY_CLASS.valueOf( TOP() ); 
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_CLASS peek( int i ) {
+		return KEY_CLASS.valueOf( PEEK( i ) ); 
+	}
+
+#else
+
+	public KEY_GENERIC_CLASS get( int index ) {
+		return get( (long)index );
+	}
+
+#endif
+
+
+	public String toString() {
+		final StringBuilder s = new StringBuilder();
+		final KEY_ITERATOR KEY_GENERIC i = iterator();
+		long n = size64();
+		KEY_GENERIC_TYPE k;
+		boolean first = true;
+
+		s.append("[");
+
+		while( n-- != 0 ) {
+			if (first) first = false;
+			else s.append(", ");
+			k = i.NEXT_KEY();
+#if #keys(reference)
+			if (this == k) s.append("(this big list)"); else
+#endif
+				s.append( String.valueOf( k ) );
+		}
+
+		s.append("]");
+		return s.toString();
+	}
+
+
+	public static class SUBLIST KEY_GENERIC extends ABSTRACT_BIG_LIST KEY_GENERIC implements java.io.Serializable {
+    	private static final long serialVersionUID = -7046029254386353129L;
+		/** The list this sublist restricts. */
+		protected final BIG_LIST KEY_GENERIC l;
+		/** Initial (inclusive) index of this sublist. */
+		protected final long from;
+		/** Final (exclusive) index of this sublist. */
+		protected long to;
+	
+		private static final boolean ASSERTS = ASSERTS_VALUE;
+	
+		public SUBLIST( final BIG_LIST KEY_GENERIC l, final long from, final long to ) {
+			this.l = l;
+			this.from = from;
+			this.to = to;
+		}
+
+		private void assertRange() {
+			if ( ASSERTS ) {
+				assert from <= l.size64();
+				assert to <= l.size64();
+				assert to >= from;
+			}
+		}
+
+		public boolean add( final KEY_GENERIC_TYPE k ) {
+			l.add( to, k );
+			to++;
+			if ( ASSERTS ) assertRange();
+			return true;
+		}
+
+		public void add( final long index, final KEY_GENERIC_TYPE k ) {
+			ensureIndex( index );
+			l.add( from + index, k );
+			to++;
+			if ( ASSERTS ) assertRange();
+		}
+
+		public boolean addAll( final long index, final Collection<? extends KEY_GENERIC_CLASS> c ) {
+			ensureIndex( index );
+			to += c.size();
+			if ( ASSERTS ) {
+				boolean retVal = l.addAll( from + index, c );
+				assertRange();
+				return retVal;
+			}
+			return l.addAll( from + index, c );
+		}
+
+		public KEY_GENERIC_TYPE GET_KEY( long index ) {
+			ensureRestrictedIndex( index );
+			return l.GET_KEY( from + index );
+		}
+
+		public KEY_GENERIC_TYPE REMOVE_KEY( long index ) {
+			ensureRestrictedIndex( index );
+			to--;
+			return l.REMOVE_KEY( from + index );
+		}
+
+		public KEY_GENERIC_TYPE set( long index, KEY_GENERIC_TYPE k ) {
+			ensureRestrictedIndex( index );
+			return l.set( from + index, k );
+		}
+
+		public void clear() {
+			removeElements( 0, size64() );
+			if ( ASSERTS ) assertRange();
+		}
+
+		public long size64() { 
+			return to - from; 
+		}
+		
+		public void getElements( final long from, final KEY_TYPE[][] a, final long offset, final long length ) {
+			ensureIndex( from );
+			if ( from + length > size64() )  throw new IndexOutOfBoundsException( "End index (" + ( from + length ) + ") is greater than list size (" + size64() + ")" );
+			l.getElements( this.from + from, a, offset, length );
+		}
+
+		public void removeElements( final long from, final long to ) {
+			ensureIndex( from );
+			ensureIndex( to );
+			l.removeElements( this.from + from, this.from + to );
+			this.to -= ( to - from );
+			if ( ASSERTS ) assertRange();
+		}
+
+		public void addElements( final long index, final KEY_GENERIC_TYPE a[][], long offset, long length ) {
+			ensureIndex( index );
+			l.addElements( this.from + index, a, offset, length );
+			this.to += length;
+			if ( ASSERTS ) assertRange();
+		}
+
+		public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( final long index ) {
+			ensureIndex( index );
+
+			return new KEY_ABSTRACT_BIG_LIST_ITERATOR KEY_GENERIC() {
+					long pos = index, last = -1;
+							
+					public boolean hasNext() { return pos < size64(); }
+					public boolean hasPrevious() { return pos > 0; }
+					public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); return l.GET_KEY( from + ( last = pos++ ) ); }
+					public KEY_GENERIC_TYPE PREV_KEY() { if ( ! hasPrevious() ) throw new NoSuchElementException(); return l.GET_KEY( from + ( last = --pos ) ); }
+					public long nextIndex() { return pos; }
+					public long previousIndex() { return pos - 1; }
+					public void add( KEY_GENERIC_TYPE k ) { 
+						if ( last == -1 ) throw new IllegalStateException();
+						SUBLIST.this.add( pos++, k ); 
+						last = -1;
+						if ( ASSERTS ) assertRange();
+					}
+					public void set( KEY_GENERIC_TYPE k ) { 
+						if ( last == -1 ) throw new IllegalStateException();
+						SUBLIST.this.set( last, k ); 
+					}
+					public void remove() { 
+						if ( last == -1 ) throw new IllegalStateException();
+						SUBLIST.this.REMOVE_KEY( last );
+						/* If the last operation was a next(), we are removing an element *before* us, and we must decrease pos correspondingly. */
+						if ( last < pos ) pos--;
+						last = -1;
+						if ( ASSERTS ) assertRange();
+					}
+				};
+		}
+
+		public BIG_LIST KEY_GENERIC subList( final long from, final long to ) {
+			ensureIndex( from );
+			ensureIndex( to );
+			if ( from > to ) throw new IllegalArgumentException( "Start index (" + from + ") is greater than end index (" + to + ")" );
+			
+			return new SUBLIST KEY_GENERIC( this, from, to );
+		}
+
+#if #keys(primitive)
+
+		public boolean rem( KEY_TYPE k ) {
+			long index = indexOf( k );
+			if ( index == -1 ) return false;
+			to--;
+			l.REMOVE_KEY( from + index );
+			if ( ASSERTS ) assertRange();
+			return true;
+		}
+
+		public boolean remove( final Object o ) {
+			return rem( KEY_OBJ2TYPE( o ) );
+		}
+
+		public boolean addAll( final long index, final COLLECTION c ) {
+			ensureIndex( index );
+			to += c.size();
+			if ( ASSERTS ) {
+				boolean retVal = l.addAll( from + index, c );
+				assertRange();
+				return retVal;
+			}
+			return l.addAll( from + index, c );
+		}
+
+		public boolean addAll( final long index, final LIST l ) {
+			ensureIndex( index );
+			to += l.size();
+			if ( ASSERTS ) {
+				boolean retVal = this.l.addAll( from + index, l );
+				assertRange();
+				return retVal;
+			}
+			return this.l.addAll( from + index, l );
+		}
+
+#else
+		@SuppressWarnings("unchecked")
+		public boolean remove( final Object o ) {
+			long index = indexOf( o );
+			if ( index == -1 ) return false;
+			REMOVE_KEY( index );
+			return true;
+		}
+#endif
+
+	}
+
+}
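
A minimal sketch of a read-only big list built on the class above, assuming the int specialization (it.unimi.dsi.fastutil.ints.AbstractIntBigList): only getInt( long ) and size64() are supplied, while iterators, equals(), hashCode(), toString() and subList() fall back to the implementations in this driver. Mutation methods are not overridden, so they throw UnsupportedOperationException as defined above.

    import it.unimi.dsi.fastutil.ints.AbstractIntBigList;

    /** A read-only big list whose element at position i is i itself (truncated to int). */
    public class IdentityBigList extends AbstractIntBigList {
        private final long size;

        public IdentityBigList( final long size ) { this.size = size; }

        public long size64() { return size; }

        public int getInt( final long index ) {
            ensureRestrictedIndex( index ); // bounds check provided by the abstract class
            return (int)index;
        }
    }
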
diff --git a/drv/AbstractBigListIterator.drv b/drv/AbstractBigListIterator.drv
new file mode 100644
index 0000000..6112b29
--- /dev/null
+++ b/drv/AbstractBigListIterator.drv
@@ -0,0 +1,66 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.ListIterator;
+import it.unimi.dsi.fastutil.BigListIterator;
+
+/**  An abstract class facilitating the creation of type-specific {@linkplain it.unimi.dsi.fastutil.BigListIterator big-list iterators}.
+ *
+ * <p>This class provides (deprecated) implementations of {@link ListIterator#previousIndex()} and {@link ListIterator#nextIndex()} that
+ * just invoke the corresponding {@link BigListIterator} methods.
+ *
+ * @see java.util.ListIterator
+ * @see it.unimi.dsi.fastutil.BigListIterator
+ */
+
+public abstract class KEY_ABSTRACT_BIG_LIST_ITERATOR KEY_GENERIC extends KEY_ABSTRACT_BIDI_ITERATOR KEY_GENERIC implements KEY_BIG_LIST_ITERATOR KEY_GENERIC {
+
+	protected KEY_ABSTRACT_BIG_LIST_ITERATOR() {}
+
+#if #keys(primitive)
+	/** Delegates to the corresponding type-specific method. */
+	public void set( KEY_GENERIC_CLASS ok ) { set( ok.KEY_VALUE() ); }
+	/** Delegates to the corresponding type-specific method. */
+	public void add( KEY_GENERIC_CLASS ok ) { add( ok.KEY_VALUE() ); }
+#endif
+
+	/** This method just throws an  {@link UnsupportedOperationException}. */
+	public void set( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+	/** This method just throws an  {@link UnsupportedOperationException}. */
+	public void add( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+
+	/** This method just iterates the type-specific version of {@link #next()} for at most
+	 * <code>n</code> times, stopping if {@link #hasNext()} becomes false. */
+
+	public long skip( final long n ) { 
+		long i = n;
+		while( i-- != 0 && hasNext() ) NEXT_KEY(); 
+		return n - i - 1;
+	}
+
+	/** This method just iterates the type-specific version of {@link #previous()} for
+	 * at most <code>n</code> times, stopping if {@link
+	 * #hasPrevious()} becomes false. */
+	public long back( final long n ) { 
+		long i = n;
+		while( i-- != 0 && hasPrevious() ) PREV_KEY(); 
+		return n - i - 1;
+	}
+
+}
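
A small usage sketch for the 64-bit iteration methods, assuming the int specialization (IntBigList and IntBigListIterator in it.unimi.dsi.fastutil.ints): the iterator is obtained with a long index and walked backwards through previousInt().

    import it.unimi.dsi.fastutil.ints.IntBigList;
    import it.unimi.dsi.fastutil.ints.IntBigListIterator;

    public class ReverseSum {
        /** Sums the elements of a big list, walking it backwards from the end. */
        public static long sum( final IntBigList l ) {
            long sum = 0;
            // listIterator( size64() ) positions the iterator past the last element.
            final IntBigListIterator i = l.listIterator( l.size64() );
            while( i.hasPrevious() ) sum += i.previousInt();
            return sum;
        }
    }
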
diff --git a/drv/AbstractCollection.drv b/drv/AbstractCollection.drv
new file mode 100644
index 0000000..a84ecc3
--- /dev/null
+++ b/drv/AbstractCollection.drv
@@ -0,0 +1,282 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.AbstractCollection;
+import java.util.Collection;
+import java.util.Iterator;
+
+/** An abstract class providing basic methods for collections implementing a type-specific interface.
+ *
+ * <P>In particular, this class provides {@link #iterator()}, <code>add()</code>, {@link #remove(Object)} and
+ * {@link #contains(Object)} methods that just call the type-specific counterpart. 
+ */
+
+public abstract class ABSTRACT_COLLECTION KEY_GENERIC extends AbstractCollection<KEY_GENERIC_CLASS> implements COLLECTION KEY_GENERIC {
+
+	protected ABSTRACT_COLLECTION() {}
+
+#if #keys(primitive)
+
+	public KEY_TYPE[] toArray( KEY_TYPE a[] ) {
+		return TO_KEY_ARRAY( a );
+	}
+
+	public KEY_TYPE[] TO_KEY_ARRAY() {
+		return TO_KEY_ARRAY( null );
+	}
+
+	public KEY_TYPE[] TO_KEY_ARRAY( KEY_TYPE a[] ) {
+		if ( a == null || a.length < size() ) a = new KEY_TYPE[ size() ];
+		ITERATORS.unwrap( iterator(), a );
+		return a;
+	}
+
+	/** Adds all elements of the given type-specific collection to this collection.
+	 *
+	 * @param c a type-specific collection.
+	 * @return <code>true</code> if this collection changed as a result of the call.
+	 */
+
+	public boolean addAll( COLLECTION c ) {
+		boolean retVal = false;
+		final KEY_ITERATOR i = c.iterator();
+		int n = c.size();
+
+		while( n-- != 0 ) if ( add( i.NEXT_KEY() ) ) retVal = true;
+		return retVal;
+	}
+
+	/** Checks whether this collection contains all elements from the given type-specific collection.
+	 *
+	 * @param c a type-specific collection.
+	 * @return <code>true</code> if this collection contains all elements of the argument.
+	 */
+
+	public boolean containsAll( COLLECTION c ) {
+		final KEY_ITERATOR i = c.iterator();
+		int n = c.size();
+
+		while( n-- != 0 ) if ( ! contains( i.NEXT_KEY() ) ) return false;
+
+		return true;
+	}
+
+	/** Retains in this collection only elements from the given type-specific collection.
+	 *
+	 * @param c a type-specific collection.
+	 * @return <code>true</code> if this collection changed as a result of the call.
+	 */
+
+	public boolean retainAll( COLLECTION c ) {
+		boolean retVal = false;
+		int n = size();
+
+		final KEY_ITERATOR i = iterator();
+
+		while( n-- != 0 ) {
+			if ( ! c.contains( i.NEXT_KEY() ) ) {
+				i.remove();
+				retVal = true;
+			}
+		}
+
+		return retVal;
+	}
+
+	/** Removes from this collection all elements in the given type-specific collection.
+	 *
+	 * @param c a type-specific collection.
+	 * @return <code>true</code> if this collection changed as a result of the call.
+	 */
+
+	public boolean removeAll( COLLECTION c ) {
+		boolean retVal = false;
+		int n = c.size();
+
+		final KEY_ITERATOR i = c.iterator();
+
+		while( n-- != 0 ) if ( rem( i.NEXT_KEY() ) ) retVal = true;
+
+		return retVal;
+	}
+
+#endif
+
+	public Object[] toArray() {
+		final Object[] a = new Object[ size() ];
+		it.unimi.dsi.fastutil.objects.ObjectIterators.unwrap( iterator(), a );
+		return a;
+	}
+
+	@SuppressWarnings("unchecked")
+	public <T> T[] toArray( T[] a ) {
+		if ( a.length < size() ) a = (T[])java.lang.reflect.Array.newInstance( a.getClass().getComponentType(), size() );
+		it.unimi.dsi.fastutil.objects.ObjectIterators.unwrap( iterator(), a );
+		return a;
+	}
+
+	/** Adds all elements of the given collection to this collection.
+	 *
+	 * @param c a collection.
+	 * @return <code>true</code> if this collection changed as a result of the call.
+	 */
+
+	public boolean addAll( Collection<? extends KEY_GENERIC_CLASS> c ) {
+		boolean retVal = false;
+		final Iterator<? extends KEY_GENERIC_CLASS> i = c.iterator();
+		int n = c.size();
+
+		while( n-- != 0 ) if ( add( i.next() ) ) retVal = true;
+		return retVal;
+	}
+
+	public boolean add( KEY_GENERIC_TYPE k ) {
+		throw new UnsupportedOperationException();
+	}
+	 
+	/** Delegates to the new covariantly stronger generic method. */
+	
+	@Deprecated
+	public KEY_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() {
+		return iterator();
+	}
+
+	public abstract KEY_ITERATOR KEY_GENERIC iterator();
+
+#if #keys(primitive)
+
+	/** Delegates to the type-specific <code>rem()</code> method. */
+	public boolean remove( Object ok ) {
+		return rem( KEY_OBJ2TYPE( ok ) );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public boolean add( final KEY_CLASS o ) {
+		return add( o.KEY_VALUE() );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public boolean rem( final Object o ) {
+		return rem( KEY_OBJ2TYPE(o) );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public boolean contains( final Object o ) {
+		return contains( KEY_OBJ2TYPE(o) );
+	}
+
+	public boolean contains( final KEY_TYPE k ) {
+		final KEY_ITERATOR iterator = iterator();
+		while ( iterator.hasNext() ) if ( k == iterator.NEXT_KEY() ) return true;
+        return false;
+    }
+
+	public boolean rem( final KEY_TYPE k ) {
+		final KEY_ITERATOR iterator = iterator();
+		while ( iterator.hasNext() ) 
+			if ( k == iterator.NEXT_KEY() ) {
+				iterator.remove();
+				return true;
+			}
+        return false;
+    }
+
+#endif
+
+	/** Checks whether this collection contains all elements from the given collection.
+	 *
+	 * @param c a collection.
+	 * @return <code>true</code> if this collection contains all elements of the argument.
+	 */
+
+	public boolean containsAll( Collection<?> c ) {
+		int n = c.size();
+
+		final Iterator<?> i = c.iterator();
+		while( n-- != 0 ) if ( ! contains( i.next() ) ) return false;
+
+		return true;
+	}
+
+
+	/** Retains in this collection only elements from the given collection.
+	 *
+	 * @param c a collection.
+	 * @return <code>true</code> if this collection changed as a result of the call.
+	 */
+
+	public boolean retainAll( Collection<?> c ) {
+		boolean retVal = false;
+		int n = size();
+
+		final Iterator<?> i = iterator();
+		while( n-- != 0 ) {
+			if ( ! c.contains( i.next() ) ) {
+				i.remove(); 
+				retVal = true;
+			}
+		}
+
+		return retVal;
+	}
+
+	/** Removes from this collection all elements in the given collection.
+	 * If the collection is an instance of this class, it uses faster iterators.
+	 *
+	 * @param c a collection.
+	 * @return <code>true</code> if this collection changed as a result of the call.
+	 */
+
+	public boolean removeAll( Collection<?> c ) {
+		boolean retVal = false;
+		int n = c.size();
+
+		final Iterator<?> i = c.iterator();
+		while( n-- != 0 ) if ( remove( i.next() ) ) retVal = true;
+
+		return retVal;
+	}
+
+	public boolean isEmpty() {
+		return size() == 0;
+	}
+
+	public String toString() {
+		final StringBuilder s = new StringBuilder();
+		final KEY_ITERATOR KEY_GENERIC i = iterator();
+		int n = size();
+		KEY_TYPE k;
+		boolean first = true;
+
+		s.append("{");
+
+		while(n-- != 0) {
+			if (first) first = false;
+			else s.append(", ");
+			k = i.NEXT_KEY();
+#if #keys(reference)
+			if (this == k) s.append("(this collection)"); else
+#endif
+				s.append(String.valueOf(k));
+		}
+
+		s.append("}");
+		return s.toString();
+	}
+}
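
A minimal sketch of a read-only collection built on the class above, assuming the int specialization (it.unimi.dsi.fastutil.ints.AbstractIntCollection): only iterator() and size() are written, while contains(), toIntArray(), the bulk operations and toString() use the fallbacks in this driver.

    import it.unimi.dsi.fastutil.ints.AbstractIntCollection;
    import it.unimi.dsi.fastutil.ints.IntIterator;
    import it.unimi.dsi.fastutil.ints.IntIterators;

    /** A read-only collection containing n copies of the same value. */
    public class ConstantCollection extends AbstractIntCollection {
        private final int value, n;

        public ConstantCollection( final int value, final int n ) {
            this.value = value;
            this.n = n;
        }

        public int size() { return n; }

        public IntIterator iterator() {
            final int[] a = new int[ n ];
            java.util.Arrays.fill( a, value );
            return IntIterators.wrap( a ); // wraps the array in a type-specific iterator
        }
    }
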
diff --git a/drv/AbstractComparator.drv b/drv/AbstractComparator.drv
new file mode 100644
index 0000000..a77d232
--- /dev/null
+++ b/drv/AbstractComparator.drv
@@ -0,0 +1,39 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+/**  An abstract class facilitating the creation of type-specific {@linkplain java.util.Comparator comparators}.
+ *
+ * <P>To create a type-specific comparator you need both a method comparing
+ * primitive types and a method comparing objects. However, if you have the
+ * first one, you can simply inherit from this class and get the second one
+ * for free.
+ * 
+ * @see java.util.Comparator
+ */
+
+public abstract class KEY_ABSTRACT_COMPARATOR KEY_GENERIC implements KEY_COMPARATOR KEY_GENERIC {
+
+	protected KEY_ABSTRACT_COMPARATOR() {}
+
+	public int compare( KEY_GENERIC_CLASS ok1, KEY_GENERIC_CLASS ok2 ) {
+		return compare( ok1.KEY_VALUE(), ok2.KEY_VALUE() );
+	}
+
+	public abstract int compare( KEY_TYPE k1, KEY_TYPE k2 );
+}
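
A minimal sketch, assuming the int specialization (it.unimi.dsi.fastutil.ints.AbstractIntComparator): only the primitive compare( int, int ) is written, and the boxed compare( Integer, Integer ) above comes for free. Since the type-specific comparator also extends java.util.Comparator of the boxed type, the same object can be passed wherever a Comparator<Integer> is expected.

    import it.unimi.dsi.fastutil.ints.AbstractIntComparator;

    /** Orders integers in descending order. */
    public class ReverseOrder extends AbstractIntComparator {
        public int compare( final int a, final int b ) {
            return a < b ? 1 : ( a == b ? 0 : -1 );
        }
    }
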
diff --git a/drv/AbstractFunction.drv b/drv/AbstractFunction.drv
new file mode 100644
index 0000000..8537ece
--- /dev/null
+++ b/drv/AbstractFunction.drv
@@ -0,0 +1,114 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+/** An abstract class providing basic methods for functions implementing a type-specific interface.
+ *
+ * <P>Optional operations just throw an {@link
+ * UnsupportedOperationException}. Generic versions of accessors delegate to
+ * the corresponding type-specific counterparts following the interface rules
+ * (they take care of returning <code>null</code> on a missing key).
+ *
+ * <P>This class directly handles a default return
+ * value (including {@linkplain #defaultReturnValue() methods to access
+ * it}). Instances of classes inheriting from this class need only return
+ * <code>defRetValue</code> to denote the lack of a key in type-specific methods. The value
+ * is serialized.
+ *
+ * <P>Implementing subclasses need only provide type-specific <code>get()</code>,
+ * type-specific <code>containsKey()</code>, and <code>size()</code> methods.
+ *
+ */
+
+public abstract class ABSTRACT_FUNCTION KEY_VALUE_GENERIC implements FUNCTION KEY_VALUE_GENERIC, java.io.Serializable {
+
+	private static final long serialVersionUID = -4940583368468432370L;
+
+	protected ABSTRACT_FUNCTION() {}
+
+	/**
+	 * The default return value for <code>get()</code>, <code>put()</code> and
+	 * <code>remove()</code>.  
+	 */
+
+	protected VALUE_GENERIC_TYPE defRetValue;
+	
+	public void defaultReturnValue( final VALUE_GENERIC_TYPE rv ) {
+		defRetValue = rv;
+	}
+
+	public VALUE_GENERIC_TYPE defaultReturnValue() {
+		return defRetValue;
+	}
+
+	public VALUE_GENERIC_TYPE put( KEY_GENERIC_TYPE key, VALUE_GENERIC_TYPE value ) {
+		throw new UnsupportedOperationException();
+	}
+
+	public VALUE_GENERIC_TYPE REMOVE_VALUE( KEY_TYPE key ) {
+		throw new UnsupportedOperationException();
+	}
+
+	public void clear() {
+		throw new UnsupportedOperationException();
+	}
+
+
+#if #keys(primitive)
+	public boolean containsKey( final Object ok ) {
+		return containsKey( KEY_OBJ2TYPE( ok ) );
+	}
+#endif
+
+#if #keys(primitive) || #values(primitive)
+                                                                                                                                             
+	/** Delegates to the corresponding type-specific method, taking care of returning <code>null</code> on a missing key.
+	 *
+	 * <P>This method must check whether the provided key is in the map using <code>containsKey()</code>. Thus,
+	 * it probes the map <em>twice</em>. Implementors of subclasses should override it with a more efficient method.
+	 */
+	public VALUE_GENERIC_CLASS get( final Object ok ) {
+		final KEY_TYPE k = KEY_OBJ2TYPE( ok );
+		return containsKey( k ) ? VALUE2OBJ( GET_VALUE( k ) ) : null;
+	}
+                                                                                                                                             
+	/** Delegates to the corresponding type-specific method, taking care of returning <code>null</code> on a missing key. 
+	 *
+	 * <P>This method must check whether the provided key is in the map using <code>containsKey()</code>. Thus,
+	 * it probes the map <em>twice</em>. Implementors of subclasses should override it with a more efficient method.
+	 */
+	public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS ok, final VALUE_GENERIC_CLASS ov ) {
+		final KEY_GENERIC_TYPE k = KEY_CLASS2TYPE( ok );
+		final boolean containsKey = containsKey( k );
+		final VALUE_GENERIC_TYPE v = put( k, VALUE_CLASS2TYPE( ov ) );
+		return containsKey ? VALUE2OBJ( v ) : null;
+	}
+                                                                                                                                             
+	/** Delegates to the corresponding type-specific method, taking care of returning <code>null</code> on a missing key. 
+	 *
+	 * <P>This method must check whether the provided key is in the map using <code>containsKey()</code>. Thus,
+	 * it probes the map <em>twice</em>. Implementors of subclasses should override it with a more efficient method.
+	 */
+	public VALUE_GENERIC_CLASS remove( final Object ok ) {
+		final KEY_TYPE k = KEY_OBJ2TYPE( ok );
+		final boolean containsKey = containsKey( k );
+		final VALUE_GENERIC_TYPE v = REMOVE_VALUE( k );
+		return containsKey ? VALUE2OBJ( v ) : null;
+	}
+#endif
+}
\ No newline at end of file
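
A minimal sketch of a function built on the class above, assuming the int-to-int specialization (it.unimi.dsi.fastutil.ints.AbstractInt2IntFunction): as the class comment says, only type-specific get(), containsKey() and size() are needed, and missing keys are reported through defRetValue.

    import it.unimi.dsi.fastutil.ints.AbstractInt2IntFunction;

    /** Maps every nonnegative key to its double; negative keys are treated as missing. */
    public class DoublingFunction extends AbstractInt2IntFunction {
        private static final long serialVersionUID = 1L;

        public DoublingFunction() { defaultReturnValue( -1 ); } // returned for missing keys

        public boolean containsKey( final int key ) { return key >= 0; }

        public int get( final int key ) { return containsKey( key ) ? 2 * key : defRetValue; }

        public int size() { return -1; } // the domain is not enumerable, so no meaningful size
    }
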
diff --git a/drv/AbstractIterator.drv b/drv/AbstractIterator.drv
new file mode 100644
index 0000000..8dcd25e
--- /dev/null
+++ b/drv/AbstractIterator.drv
@@ -0,0 +1,58 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+/**  An abstract class facilitating the creation of type-specific iterators.
+ *
+ * <P>To create a type-specific iterator you need both a method returning the
+ * next element as a primitive type and a method returning the next element as an
+ * object. However, if you inherit from this class you need to provide just one of the two.
+ *
+ * <P>This class also implements a trivial version of {@link #skip(int)} that uses
+ * type-specific methods; moreover, {@link #remove()} will throw an {@link
+ * UnsupportedOperationException}.
+ *
+ * @see java.util.Iterator
+ */
+
+public abstract class KEY_ABSTRACT_ITERATOR KEY_GENERIC implements KEY_ITERATOR KEY_GENERIC {
+
+	protected KEY_ABSTRACT_ITERATOR() {}
+
+#if #keys(primitive)
+
+	/** Delegates to the corresponding generic method. */
+	public KEY_TYPE NEXT_KEY() { return next().KEY_VALUE(); }
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_GENERIC_CLASS next() { return KEY_CLASS.valueOf( NEXT_KEY() ); }
+
+#endif
+
+	/** This method just throws an  {@link UnsupportedOperationException}. */
+	public void remove() { throw new UnsupportedOperationException(); }
+
+	/** This method just iterates the type-specific version of {@link #next()} for at most
+	 * <code>n</code> times, stopping if {@link #hasNext()} becomes false. */
+
+	public int skip( final int n ) { 
+		int i = n;
+		while( i-- != 0 && hasNext() ) NEXT_KEY(); 
+		return n - i - 1;
+	}
+}
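
A minimal sketch, assuming the int specialization (it.unimi.dsi.fastutil.ints.AbstractIntIterator): only hasNext() and the primitive nextInt() are written; the boxed next(), the throwing remove() and skip() come from the class above.

    import it.unimi.dsi.fastutil.ints.AbstractIntIterator;
    import java.util.NoSuchElementException;

    /** Counts down from n to 1 without boxing. */
    public class CountdownIterator extends AbstractIntIterator {
        private int n;

        public CountdownIterator( final int n ) { this.n = n; }

        public boolean hasNext() { return n > 0; }

        public int nextInt() {
            if ( ! hasNext() ) throw new NoSuchElementException();
            return n--;
        }
    }
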
diff --git a/drv/AbstractList.drv b/drv/AbstractList.drv
new file mode 100644
index 0000000..91ec85c
--- /dev/null
+++ b/drv/AbstractList.drv
@@ -0,0 +1,642 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+#if #keys(reference)
+import it.unimi.dsi.fastutil.Stack;
+#endif
+
+import java.util.List;
+import java.util.Iterator;
+import java.util.ListIterator;
+import java.util.Collection;
+import java.util.NoSuchElementException;
+
+/**  An abstract class providing basic methods for lists implementing a type-specific list interface.
+ *
+ * <P>As an additional bonus, this class implements on top of the list operations a type-specific stack.
+ */
+
+public abstract class ABSTRACT_LIST KEY_GENERIC extends ABSTRACT_COLLECTION KEY_GENERIC implements LIST KEY_GENERIC, STACK KEY_GENERIC {
+
+	protected ABSTRACT_LIST() {}
+	
+	/** Ensures that the given index is nonnegative and not greater than the list size.
+	 *
+	 * @param index an index.
+	 * @throws IndexOutOfBoundsException if the given index is negative or greater than the list size.
+	 */
+	protected void ensureIndex( final int index ) {
+		if ( index < 0 )  throw new IndexOutOfBoundsException( "Index (" + index + ") is negative" );
+		if ( index > size() ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than list size (" + ( size() ) + ")" );
+	}
+	
+	/** Ensures that the given index is nonnegative and smaller than the list size.
+	 *
+	 * @param index an index.
+	 * @throws IndexOutOfBoundsException if the given index is negative or not smaller than the list size.
+	 */
+	protected void ensureRestrictedIndex( final int index ) {
+		if ( index < 0 )  throw new IndexOutOfBoundsException( "Index (" + index + ") is negative" );
+		if ( index >= size() ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than or equal to list size (" + ( size() ) + ")" );
+	}
+
+	public void add( final int index, final KEY_GENERIC_TYPE k ) {
+		throw new UnsupportedOperationException();
+	}
+
+	public boolean add( final KEY_GENERIC_TYPE k ) {
+		add( size(), k );
+		return true;
+	}
+
+	public KEY_GENERIC_TYPE REMOVE_KEY( int i ) {
+		throw new UnsupportedOperationException();
+	}
+
+	public KEY_GENERIC_TYPE set( final int index, final KEY_GENERIC_TYPE k ) {
+		throw new UnsupportedOperationException();
+	}
+
+	public boolean addAll( int index, final Collection<? extends KEY_GENERIC_CLASS> c ) {
+		ensureIndex( index );
+		int n = c.size();
+		if ( n == 0 ) return false;
+		Iterator<? extends KEY_GENERIC_CLASS> i = c.iterator();
+		while( n-- != 0 ) add( index++, i.next() );
+		return true;
+	}
+
+	/** Delegates to a more generic method. */
+	public boolean addAll( final Collection<? extends KEY_GENERIC_CLASS> c ) {
+		return addAll( size(), c );
+	}
+
+	/** Delegates to the new covariantly stronger generic method. */
+	
+	@Deprecated
+	public KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD() {
+		return listIterator();
+	}
+
+	/** Delegates to the new covariantly stronger generic method. */
+	
+	@Deprecated
+	public KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD( final int index ) {
+		return listIterator( index );
+	}
+
+	public KEY_LIST_ITERATOR KEY_GENERIC iterator() {
+		return listIterator();
+	}
+
+	public KEY_LIST_ITERATOR KEY_GENERIC listIterator() {
+		return listIterator( 0 );
+	}
+
+	public KEY_LIST_ITERATOR KEY_GENERIC listIterator( final int index ) {
+		return new KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC() {
+				int pos = index, last = -1;
+							
+				public boolean hasNext() { return pos < ABSTRACT_LIST.this.size(); }
+				public boolean hasPrevious() { return pos > 0; }
+				public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); return ABSTRACT_LIST.this.GET_KEY( last = pos++ ); }
+				public KEY_GENERIC_TYPE PREV_KEY() { if ( ! hasPrevious() ) throw new NoSuchElementException(); return ABSTRACT_LIST.this.GET_KEY( last = --pos ); }
+				public int nextIndex() { return pos; }
+				public int previousIndex() { return pos - 1; }
+				public void add( KEY_GENERIC_TYPE k ) { 
+					if ( last == -1 ) throw new IllegalStateException();
+					ABSTRACT_LIST.this.add( pos++, k ); 
+					last = -1;
+				}
+				public void set( KEY_GENERIC_TYPE k ) { 
+					if ( last == -1 ) throw new IllegalStateException();
+					ABSTRACT_LIST.this.set( last, k ); 
+				}
+				public void remove() { 
+					if ( last == -1 ) throw new IllegalStateException();
+					ABSTRACT_LIST.this.REMOVE_KEY( last );
+					/* If the last operation was a next(), we are removing an element *before* us, and we must decrease pos correspondingly. */
+					if ( last < pos ) pos--;
+					last = -1;
+				}
+			};
+	}
+
+
+
+	public boolean contains( final KEY_TYPE k ) {
+		return indexOf( k ) >= 0;
+	}
+
+	public int indexOf( final KEY_TYPE k ) {
+		final KEY_LIST_ITERATOR KEY_GENERIC i = listIterator();
+		KEY_GENERIC_TYPE e;
+		while( i.hasNext() ) {
+			e = i.NEXT_KEY();
+			if ( KEY_EQUALS( k, e ) ) return i.previousIndex(); 
+		}
+		return -1;
+	}
+
+	public int lastIndexOf( final KEY_TYPE k ) {
+		KEY_LIST_ITERATOR KEY_GENERIC i = listIterator( size() );
+		KEY_GENERIC_TYPE e;
+		while( i.hasPrevious() ) {
+			e = i.PREV_KEY();
+			if ( KEY_EQUALS( k, e ) ) return i.nextIndex(); 
+		}
+		return -1;
+	}
+
+	public void size( final int size ) {
+		int i = size();
+		if ( size > i ) while( i++ < size ) add( KEY_NULL );
+		else while( i-- != size ) remove( i );
+	}		
+
+
+	public LIST KEY_GENERIC subList( final int from, final int to ) {
+		ensureIndex( from );
+		ensureIndex( to );
+		if ( from > to ) throw new IndexOutOfBoundsException( "Start index (" + from + ") is greater than end index (" + to + ")" );
+		
+		return new SUBLIST KEY_GENERIC( this, from, to );
+	}
+	
+	/** Delegates to the new covariantly stronger generic method. */
+
+	@Deprecated
+	public LIST KEY_GENERIC SUBLIST_METHOD( final int from, final int to ) {
+		return subList( from, to );
+	}
+
+	/** Removes elements of this type-specific list one-by-one. 
+	 *
+	 * <P>This is a trivial iterator-based implementation. It is expected that
+	 * implementations will override this method with a more optimized version.
+	 *
+	 *
+	 * @param from the start index (inclusive).
+	 * @param to the end index (exclusive).
+	 */
+	
+	public void removeElements( final int from, final int to ) {
+		ensureIndex( to );
+		KEY_LIST_ITERATOR KEY_GENERIC i = listIterator( from );
+		int n = to - from;
+		if ( n < 0 ) throw new IllegalArgumentException( "Start index (" + from + ") is greater than end index (" + to + ")" );
+		while( n-- != 0 ) {
+			i.NEXT_KEY();
+			i.remove();
+		}
+	}
+
+	/** Adds elements to this type-specific list one-by-one. 
+	 *
+	 * <P>This is a trivial iterator-based implementation. It is expected that
+	 * implementations will override this method with a more optimized version.
+	 *
+	 * @param index the index at which to add elements.
+	 * @param a the array containing the elements.
+	 * @param offset the offset of the first element to add.
+	 * @param length the number of elements to add.
+	 */
+	
+	public void addElements( int index, final KEY_GENERIC_TYPE a[], int offset, int length ) {
+		ensureIndex( index );
+		if ( offset < 0 ) throw new ArrayIndexOutOfBoundsException( "Offset (" + offset + ") is negative" );
+		if ( offset + length > a.length ) throw new ArrayIndexOutOfBoundsException( "End index (" + ( offset + length ) + ") is greater than array length (" + a.length + ")" );
+		while( length-- != 0 ) add( index++, a[ offset++ ] );
+	}
+
+	public void addElements( final int index, final KEY_GENERIC_TYPE a[] ) {
+		addElements( index, a, 0, a.length );
+	}
+
+	/** Copies elements of this type-specific list into the given array one-by-one.
+	 *
+	 * <P>This is a trivial iterator-based implementation. It is expected that
+	 * implementations will override this method with a more optimized version.
+	 *
+	 * @param from the start index (inclusive).
+	 * @param a the destination array.
+	 * @param offset the offset into the destination array where to store the first element copied.
+	 * @param length the number of elements to be copied.
+	 */
+	 
+	public void getElements( final int from, final KEY_TYPE a[], int offset, int length ) {
+		KEY_LIST_ITERATOR KEY_GENERIC i = listIterator( from );
+		if ( offset < 0 ) throw new ArrayIndexOutOfBoundsException( "Offset (" + offset + ") is negative" );
+		if ( offset + length > a.length ) throw new ArrayIndexOutOfBoundsException( "End index (" + ( offset + length ) + ") is greater than array length (" + a.length + ")" );
+		if ( from + length > size() ) throw new IndexOutOfBoundsException( "End index (" + ( from + length ) + ") is greater than list size (" + size() + ")" );
+		while( length-- != 0 ) a[ offset++ ] = i.NEXT_KEY();
+	}
+
+#if ! #keyclass(Reference)
+	private boolean valEquals( final Object a, final Object b ) {
+		return a == null ? b == null : a.equals( b );
+	}
+#endif
+
+	public boolean equals( final Object o ) {
+		if ( o == this ) return true;
+		if ( ! ( o instanceof List ) ) return false;
+		final List<?> l = (List<?>)o;
+		int s = size();
+		if ( s != l.size() ) return false;
+
+		final ListIterator<?> i1 = listIterator(), i2 = l.listIterator();
+
+#if #keyclass(Reference)
+		while( s-- !=  0 ) if ( i1.next() != i2.next() ) return false;
+#else
+		while( s-- !=  0 ) if ( ! valEquals( i1.next(), i2.next() ) ) return false;
+#endif
+		return true;
+	}
+
+#if ! #keyclass(Reference)
+    /** Compares this list to another object. If the
+     * argument is a {@link java.util.List}, this method performs a lexicographical comparison; otherwise,
+     * it throws a <code>ClassCastException</code>.
+     *
+     * @param l a list.
+     * @return if the argument is a {@link java.util.List}, a negative integer,
+     * zero, or a positive integer as this list is lexicographically less than, equal
+     * to, or greater than the argument.
+     * @throws ClassCastException if the argument is not a list.
+     */
+
+	@SuppressWarnings("unchecked")
+	public int compareTo( final List<? extends KEY_GENERIC_CLASS> l ) {
+		if ( l == this ) return 0;
+
+		if ( l instanceof LIST ) {
+			
+			final KEY_LIST_ITERATOR KEY_GENERIC i1 = listIterator(), i2 = ((LIST KEY_GENERIC)l).listIterator();
+			int r;
+			KEY_GENERIC_TYPE e1, e2;
+			
+			while( i1.hasNext() && i2.hasNext() ) {
+				e1 = i1.NEXT_KEY();
+				e2 = i2.NEXT_KEY();
+				if ( ( r = KEY_CMP( e1, e2 ) ) != 0 ) return r;
+			}
+			return i2.hasNext() ? -1 : ( i1.hasNext() ? 1 : 0 );
+		}
+		
+		ListIterator<? extends KEY_GENERIC_CLASS> i1 = listIterator(), i2 = l.listIterator();
+		int r;
+
+		while( i1.hasNext() && i2.hasNext() ) {
+			if ( ( r = ((Comparable<? super KEY_GENERIC_CLASS>)i1.next()).compareTo( i2.next() ) ) != 0 ) return r;
+		}
+		return i2.hasNext() ? -1 : ( i1.hasNext() ? 1 : 0 );
+	}
+#endif
+
+	/** Returns the hash code for this list, which is identical to {@link java.util.List#hashCode()}.
+	 *
+	 * @return the hash code for this list.
+	 */
+	public int hashCode() {
+		KEY_ITERATOR KEY_GENERIC i = iterator();
+		int h = 1, s = size();
+		while ( s-- != 0 ) {
+			KEY_GENERIC_TYPE k = i.NEXT_KEY(); 
+			h = 31 * h + KEY2JAVAHASH( k );
+		}
+		return h;
+	}
+
+	
+	public void push( KEY_GENERIC_TYPE o ) {
+		add( o ); 
+	}
+
+	public KEY_GENERIC_TYPE POP() {
+		if ( isEmpty() ) throw new NoSuchElementException();
+		return REMOVE_KEY( size() - 1 );
+	}
+
+	public KEY_GENERIC_TYPE TOP() {
+		if ( isEmpty() ) throw new NoSuchElementException();
+		return GET_KEY( size() - 1 );
+	}
+
+	public KEY_GENERIC_TYPE PEEK( int i ) {
+		return GET_KEY( size() - 1 - i );
+	}
+
+#if #keys(primitive)
+
+	public boolean rem( KEY_TYPE k ) {
+		int index = indexOf( k );
+		if ( index == -1 ) return false;
+		REMOVE_KEY( index );
+		return true;
+	}
+
+	/** Delegates to <code>rem()</code>. */
+	public boolean remove( final Object o ) {
+		return rem( KEY_OBJ2TYPE( o ) );
+	}
+
+	/** Delegates to a more generic method. */
+	public boolean addAll( final int index, final COLLECTION c ) {
+		return addAll( index, (Collection<? extends KEY_CLASS>)c );
+	}
+
+	/** Delegates to a more generic method. */
+	public boolean addAll( final int index, final LIST l ) {
+		return addAll( index, (COLLECTION)l );
+	}
+
+	public boolean addAll( final COLLECTION c ) {
+		return addAll( size(), c );
+	}
+
+	public boolean addAll( final LIST l ) {
+		return addAll( size(), l );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public void add( final int index, final KEY_CLASS ok ) {
+		add( index, ok.KEY_VALUE() );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_CLASS set( final int index, final KEY_CLASS ok ) {
+		return KEY2OBJ( set( index, ok.KEY_VALUE() ) );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_CLASS get( final int index ) {
+		return KEY2OBJ( GET_KEY( index ) );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public int indexOf( final Object ok) {
+		return indexOf( KEY_OBJ2TYPE( ok ) );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public int lastIndexOf( final Object ok ) {
+		return lastIndexOf( KEY_OBJ2TYPE( ok ) );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_CLASS remove( final int index ) {
+		return KEY2OBJ( REMOVE_KEY( index ) );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public void push( KEY_CLASS o ) {
+		push( o.KEY_VALUE() ); 
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_CLASS pop() {
+		return KEY_CLASS.valueOf( POP() ); 
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_CLASS top() {
+		return KEY_CLASS.valueOf( TOP() ); 
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_CLASS peek( int i ) {
+		return KEY_CLASS.valueOf( PEEK( i ) ); 
+	}
+
+#endif
+
+
+	public String toString() {
+		final StringBuilder s = new StringBuilder();
+		final KEY_ITERATOR KEY_GENERIC i = iterator();
+		int n = size();
+		KEY_GENERIC_TYPE k;
+		boolean first = true;
+
+		s.append("[");
+
+		while( n-- != 0 ) {
+			if (first) first = false;
+			else s.append(", ");
+			k = i.NEXT_KEY();
+#if #keys(reference)
+			if (this == k) s.append("(this list)"); else
+#endif
+				s.append( String.valueOf( k ) );
+		}
+
+		s.append("]");
+		return s.toString();
+	}
+
+
+	public static class SUBLIST KEY_GENERIC extends ABSTRACT_LIST KEY_GENERIC implements java.io.Serializable {
+    	private static final long serialVersionUID = -7046029254386353129L;
+		/** The list this sublist restricts. */
+		protected final LIST KEY_GENERIC l;
+		/** Initial (inclusive) index of this sublist. */
+		protected final int from;
+		/** Final (exclusive) index of this sublist. */
+		protected int to;
+	
+		private static final boolean ASSERTS = ASSERTS_VALUE;
+	
+		public SUBLIST( final LIST KEY_GENERIC l, final int from, final int to ) {
+			this.l = l;
+			this.from = from;
+			this.to = to;
+		}
+
+		private void assertRange() {
+			if ( ASSERTS ) {
+				assert from <= l.size();
+				assert to <= l.size();
+				assert to >= from;
+			}
+		}
+
+		public boolean add( final KEY_GENERIC_TYPE k ) {
+			l.add( to, k );
+			to++;
+			if ( ASSERTS ) assertRange();
+			return true;
+		}
+
+		public void add( final int index, final KEY_GENERIC_TYPE k ) {
+			ensureIndex( index );
+			l.add( from + index, k );
+			to++;
+			if ( ASSERTS ) assertRange();
+		}
+
+		public boolean addAll( final int index, final Collection<? extends KEY_GENERIC_CLASS> c ) {
+			ensureIndex( index );
+			to += c.size();
+			if ( ASSERTS ) {
+				boolean retVal = l.addAll( from + index, c );
+				assertRange();
+				return retVal;
+			}
+			return l.addAll( from + index, c );
+		}
+
+		public KEY_GENERIC_TYPE GET_KEY( int index ) {
+			ensureRestrictedIndex( index );
+			return l.GET_KEY( from + index );
+		}
+
+		public KEY_GENERIC_TYPE REMOVE_KEY( int index ) {
+			ensureRestrictedIndex( index );
+			to--;
+			return l.REMOVE_KEY( from + index );
+		}
+
+		public KEY_GENERIC_TYPE set( int index, KEY_GENERIC_TYPE k ) {
+			ensureRestrictedIndex( index );
+			return l.set( from + index, k );
+		}
+
+		public void clear() {
+			removeElements( 0, size() );
+			if ( ASSERTS ) assertRange();
+		}
+
+		public int size() { 
+			return to - from; 
+		}
+		
+		public void getElements( final int from, final KEY_TYPE[] a, final int offset, final int length ) {
+			ensureIndex( from );
+			if ( from + length > size() ) throw new IndexOutOfBoundsException( "End index (" + ( from + length ) + ") is greater than list size (" + size() + ")" );
+			l.getElements( this.from + from, a, offset, length );
+		}
+
+		public void removeElements( final int from, final int to ) {
+			ensureIndex( from );
+			ensureIndex( to );
+			l.removeElements( this.from + from, this.from + to );
+			this.to -= ( to - from );
+			if ( ASSERTS ) assertRange();
+		}
+
+		public void addElements( int index, final KEY_GENERIC_TYPE a[], int offset, int length ) {
+			ensureIndex( index );
+			l.addElements( this.from + index, a, offset, length );
+			this.to += length;
+			if ( ASSERTS ) assertRange();
+		}
+
+		public KEY_LIST_ITERATOR KEY_GENERIC listIterator( final int index ) {
+			ensureIndex( index );
+
+			return new KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC() {
+					int pos = index, last = -1;
+							
+					public boolean hasNext() { return pos < size(); }
+					public boolean hasPrevious() { return pos > 0; }
+					public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); return l.GET_KEY( from + ( last = pos++ ) ); }
+					public KEY_GENERIC_TYPE PREV_KEY() { if ( ! hasPrevious() ) throw new NoSuchElementException(); return l.GET_KEY( from + ( last = --pos ) ); }
+					public int nextIndex() { return pos; }
+					public int previousIndex() { return pos - 1; }
+					public void add( KEY_GENERIC_TYPE k ) { 
+						// Note: unlike set() and remove(), ListIterator.add() has no precondition on a previous next()/previous() call.
+						SUBLIST.this.add( pos++, k ); 
+						last = -1;
+						if ( ASSERTS ) assertRange();
+					}
+					public void set( KEY_GENERIC_TYPE k ) { 
+						if ( last == -1 ) throw new IllegalStateException();
+						SUBLIST.this.set( last, k ); 
+					}
+					public void remove() { 
+						if ( last == -1 ) throw new IllegalStateException();
+						SUBLIST.this.REMOVE_KEY( last );
+						/* If the last operation was a next(), we are removing an element *before* us, and we must decrease pos correspondingly. */
+						if ( last < pos ) pos--;
+						last = -1;
+						if ( ASSERTS ) assertRange();
+					}
+				};
+		}
+
+		public LIST KEY_GENERIC subList( final int from, final int to ) {
+			ensureIndex( from );
+			ensureIndex( to );
+			if ( from > to ) throw new IllegalArgumentException( "Start index (" + from + ") is greater than end index (" + to + ")" );
+			
+			return new SUBLIST KEY_GENERIC( this, from, to );
+		}
+
+#if #keys(primitive)
+
+		public boolean rem( KEY_TYPE k ) {
+			int index = indexOf( k );
+			if ( index == -1 ) return false;
+			to--;
+			l.REMOVE_KEY( from + index );
+			if ( ASSERTS ) assertRange();
+			return true;
+		}
+
+		public boolean remove( final Object o ) {
+			return rem( KEY_OBJ2TYPE( o ) );
+		}
+
+		public boolean addAll( final int index, final COLLECTION c ) {
+			ensureIndex( index );
+			to += c.size();
+			if ( ASSERTS ) {
+				boolean retVal = l.addAll( from + index, c );
+				assertRange();
+				return retVal;
+			}
+			return l.addAll( from + index, c );
+		}
+
+		public boolean addAll( final int index, final LIST l ) {
+			ensureIndex( index );
+			to += l.size();
+			if ( ASSERTS ) {
+				boolean retVal = this.l.addAll( from + index, l );
+				assertRange();
+				return retVal;
+			}
+			return this.l.addAll( from + index, l );
+		}
+
+#else
+		@SuppressWarnings("unchecked")
+		public boolean remove( final Object o ) {
+			int index = indexOf( o );
+			if ( index == -1 ) return false;
+			REMOVE_KEY( index );
+			return true;
+		}
+#endif
+
+	}
+
+}
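
A short usage sketch of the behaviour specified by the driver above: List-compatible equals(), the lexicographic compareTo(), and the stack methods. The concrete names (IntArrayList, topInt(), popInt()) are assumed expansions of the macros for the int specialization, not anything defined in this file:

	import it.unimi.dsi.fastutil.ints.IntArrayList;

	public class AbstractListSketch {
		public static void main( String[] args ) {
			IntArrayList a = IntArrayList.wrap( new int[] { 1, 2, 3 } );
			IntArrayList b = IntArrayList.wrap( new int[] { 1, 2, 4 } );
			System.out.println( a.equals( b ) );        // false: the last element differs
			System.out.println( a.compareTo( b ) < 0 ); // true: lexicographically smaller
			a.push( 9 );                                // the stack methods operate on the list tail
			System.out.println( a.topInt() );           // 9
			System.out.println( a.popInt() );           // 9, leaving [1, 2, 3]
		}
	}
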
diff --git a/drv/AbstractListIterator.drv b/drv/AbstractListIterator.drv
new file mode 100644
index 0000000..521a7b9
--- /dev/null
+++ b/drv/AbstractListIterator.drv
@@ -0,0 +1,47 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+/**  An abstract class facilitating the creation of type-specific {@linkplain java.util.ListIterator list iterators}.
+ *
+ * <P>This class provides trivial type-specific implementations of {@link
+ * java.util.ListIterator#set(Object) set()} and {@link java.util.ListIterator#add(Object) add()} which
+ * throw an {@link UnsupportedOperationException}. For primitive types, it also
+ * provides trivial implementations of {@link java.util.ListIterator#set(Object) set()} and {@link
+ * java.util.ListIterator#add(Object) add()} that just invoke the type-specific ones.
+ * 
+ *
+ * @see java.util.ListIterator
+ */
+
+public abstract class KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC extends KEY_ABSTRACT_BIDI_ITERATOR KEY_GENERIC implements KEY_LIST_ITERATOR KEY_GENERIC {
+
+	protected KEY_ABSTRACT_LIST_ITERATOR() {}
+
+#if #keys(primitive)
+	/** Delegates to the corresponding type-specific method. */
+	public void set( KEY_GENERIC_CLASS ok ) { set( ok.KEY_VALUE() ); }
+	/** Delegates to the corresponding type-specific method. */
+	public void add( KEY_GENERIC_CLASS ok ) { add( ok.KEY_VALUE() ); }
+#endif
+
+	/** This method just throws an  {@link UnsupportedOperationException}. */
+	public void set( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+	/** This method just throws an  {@link UnsupportedOperationException}. */
+	public void add( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+}
diff --git a/drv/AbstractMap.drv b/drv/AbstractMap.drv
new file mode 100644
index 0000000..979c508
--- /dev/null
+++ b/drv/AbstractMap.drv
@@ -0,0 +1,305 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import VALUE_PACKAGE.VALUE_COLLECTION;
+import VALUE_PACKAGE.VALUE_ABSTRACT_COLLECTION;
+import VALUE_PACKAGE.VALUE_ITERATOR;
+import VALUE_PACKAGE.VALUE_ABSTRACT_ITERATOR;
+
+import it.unimi.dsi.fastutil.objects.ObjectSet;
+
+#if #keys(primitive) && #values(primitive)
+import it.unimi.dsi.fastutil.objects.ObjectIterator;
+#endif
+
+import java.util.Iterator;
+import java.util.Map;
+
+/** An abstract class providing basic methods for maps implementing a type-specific interface.
+ *
+ * <P>Optional operations just throw an {@link
+ * UnsupportedOperationException}. Generic versions of accessors delegate to
+ * the corresponding type-specific counterparts following the interface rules
+ * (they take care of returning <code>null</code> on a missing key).
+ *
+ * <P>As a further help, this class provides a {@link BasicEntry BasicEntry} inner class
+ * that implements a type-specific version of {@link java.util.Map.Entry}; it
+ * is particularly useful for those classes that do not implement their own
+ * entries (e.g., most immutable maps).
+ */
+
+public abstract class ABSTRACT_MAP KEY_VALUE_GENERIC extends ABSTRACT_FUNCTION KEY_VALUE_GENERIC implements MAP KEY_VALUE_GENERIC, java.io.Serializable {
+
+	private static final long serialVersionUID = -4940583368468432370L;
+	
+	protected ABSTRACT_MAP() {}
+
+#if #values(primitive)
+	public boolean containsValue( Object ov ) {
+		return containsValue( VALUE_OBJ2TYPE( ov ) );
+	}
+#endif
+
+	/** Checks whether the given value is contained in {@link #values()}. */
+	public boolean containsValue( VALUE_TYPE v ) {
+		return values().contains( v );
+	}
+
+	/** Checks whether the given value is contained in {@link #keySet()}. */
+	public boolean containsKey( KEY_TYPE k ) {
+		return keySet().contains( k );
+	}
+
+	/** Adds all the key/value pairs of the given map to this map.
+	 * If the given map implements this map's type-specific interface,
+	 * the faster type-specific entry accessors are used.
+	 *
+	 * @param m a map.
+	 */
+	@SuppressWarnings("unchecked")
+	public void putAll(Map<? extends KEY_GENERIC_CLASS,? extends VALUE_GENERIC_CLASS> m) {
+		int n = m.size();
+		final Iterator<? extends Map.Entry<? extends KEY_GENERIC_CLASS,? extends VALUE_GENERIC_CLASS>> i = m.entrySet().iterator();
+
+		if (m instanceof MAP) {
+			MAP.Entry KEY_VALUE_EXTENDS_GENERIC e;
+			while(n-- != 0) {
+				e = (MAP.Entry KEY_VALUE_EXTENDS_GENERIC)i.next();
+				put(e.ENTRY_GET_KEY(), e.ENTRY_GET_VALUE());
+			}
+		}
+		else {
+			Map.Entry<? extends KEY_GENERIC_CLASS,? extends VALUE_GENERIC_CLASS> e;
+			while(n-- != 0) {
+				e = i.next();
+				put(e.getKey(), e.getValue());
+			}
+		} 
+	}
+
+	public boolean isEmpty() {
+		return size() == 0;
+	}
+
+	/** This class provides a basic but complete type-specific entry class for all those map implementations
+	 * that do not have entries of their own (e.g., most immutable maps). 
+	 *
+	 * <P>This class does not implement {@link java.util.Map.Entry#setValue(Object) setValue()}, as the modification
+	 * would not be reflected in the base map.
+	 */
+
+	public static class BasicEntry KEY_VALUE_GENERIC implements MAP.Entry KEY_VALUE_GENERIC {
+		protected KEY_GENERIC_TYPE key;
+		protected VALUE_GENERIC_TYPE value;
+
+		public BasicEntry( final KEY_GENERIC_CLASS key, final VALUE_GENERIC_CLASS value ) {
+			this.key = KEY_CLASS2TYPE(key);
+			this.value = VALUE_CLASS2TYPE(value);
+		}
+
+#if #keys(primitive) || #values(primitive)
+		  
+		public BasicEntry( final KEY_GENERIC_TYPE key, final VALUE_GENERIC_TYPE value ) {
+			this.key = key;
+			this.value = value;
+		}
+		  
+#endif
+
+		public KEY_GENERIC_CLASS getKey() {
+			return KEY2OBJ(key);
+		}
+		  
+#if #keys(primitive)
+		public KEY_TYPE ENTRY_GET_KEY() {
+			return key;
+		}
+#endif
+
+		public VALUE_GENERIC_CLASS getValue() {
+			return VALUE2OBJ(value);
+		}
+		  
+#if #values(primitive)
+		public VALUE_TYPE ENTRY_GET_VALUE() {
+			return value;
+		}
+#endif
+
+		public VALUE_GENERIC_TYPE setValue( final VALUE_GENERIC_TYPE value ) {
+			throw new UnsupportedOperationException();
+		}
+		  
+#if #values(primitive)
+		  
+		public VALUE_GENERIC_CLASS setValue( final VALUE_GENERIC_CLASS value ) {
+			return VALUE_CLASS.valueOf(setValue(value.VALUE_VALUE()));
+		}
+
+#endif
+
+		public boolean equals( final Object o ) {
+			if (!(o instanceof Map.Entry)) return false;
+			Map.Entry<?,?> e = (Map.Entry<?,?>)o;
+				
+			return KEY_EQUALS( key, KEY_OBJ2TYPE( e.getKey() ) ) && VALUE_EQUALS( value, VALUE_OBJ2TYPE( e.getValue() ) );
+		}
+		  
+		public int hashCode() {
+			return KEY2JAVAHASH(key) ^ VALUE2JAVAHASH(value);
+		}
+		  
+		  
+		public String toString() {
+			return key + "->" + value;
+		}
+	}
+
+
+	/** Returns a type-specific-set view of the keys of this map.
+	 *
+	 * <P>The view is backed by the set returned by {@link #entrySet()}. Note that
+	 * <em>no attempt is made at caching the result of this method</em>, as this would
+	 * require adding some attributes that lightweight implementations would
+	 * not need. Subclasses may easily override this policy by calling
+	 * this method and caching the result, but implementors are encouraged to
+	 * write more efficient ad-hoc implementations.
+	 *
+	 * @return a set view of the keys of this map; it may be safely cast to a type-specific interface.
+	 */
+
+
+	public SET KEY_GENERIC keySet() {
+		return new ABSTRACT_SET KEY_GENERIC() {
+
+				public boolean contains( final KEY_TYPE k ) { return containsKey( k ); }
+
+				public int size() { return ABSTRACT_MAP.this.size(); }
+				public void clear() { ABSTRACT_MAP.this.clear(); }
+
+				public KEY_ITERATOR KEY_GENERIC iterator() {
+					return new KEY_ABSTRACT_ITERATOR KEY_GENERIC() {
+							final ObjectIterator<Map.Entry<KEY_GENERIC_CLASS,VALUE_GENERIC_CLASS>> i = entrySet().iterator();
+
+							public KEY_GENERIC_TYPE NEXT_KEY() { return ((MAP.Entry KEY_VALUE_GENERIC)i.next()).ENTRY_GET_KEY(); };
+
+							public boolean hasNext() { return i.hasNext(); }
+						};
+				}
+			};
+	}
+
+	/** Returns a type-specific collection view of the values of this map.
+	 *
+	 * <P>The view is backed by the set returned by {@link #entrySet()}. Note that
+	 * <em>no attempt is made at caching the result of this method</em>, as this would
+	 * require adding some attributes that lightweight implementations would
+	 * not need. Subclasses may easily override this policy by calling
+	 * this method and caching the result, but implementors are encouraged to
+	 * write more efficient ad-hoc implementations.
+	 *
+	 * @return a collection view of the values of this map; it may be safely cast to a type-specific interface.
+	 */
+
+
+	public VALUE_COLLECTION VALUE_GENERIC values() {
+		return new VALUE_ABSTRACT_COLLECTION VALUE_GENERIC() {
+
+				public boolean contains( final VALUE_TYPE k ) { return containsValue( k ); }
+
+				public int size() { return ABSTRACT_MAP.this.size(); }
+				public void clear() { ABSTRACT_MAP.this.clear(); }
+
+				public VALUE_ITERATOR VALUE_GENERIC iterator() {
+					return new VALUE_ABSTRACT_ITERATOR VALUE_GENERIC() {
+							final ObjectIterator<Map.Entry<KEY_GENERIC_CLASS,VALUE_GENERIC_CLASS>> i = entrySet().iterator();
+
+							public VALUE_GENERIC_TYPE NEXT_VALUE() { return ((MAP.Entry KEY_VALUE_GENERIC)i.next()).ENTRY_GET_VALUE(); };
+
+							public boolean hasNext() { return i.hasNext(); }
+						};
+				}
+			};
+	}
+
+
+	@SuppressWarnings({ "unchecked", "rawtypes" })
+	public ObjectSet<Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>> entrySet() {
+		return (ObjectSet)ENTRYSET();
+	}
+
+
+
+	/** Returns a hash code for this map.
+	 *
+	 * The hash code of a map is computed by summing the hash codes of its entries.
+	 *
+	 * @return a hash code for this map.
+	 */
+
+	public int hashCode() {
+		int h = 0, n = size();
+		final ObjectIterator<? extends Map.Entry<KEY_GENERIC_CLASS,VALUE_GENERIC_CLASS>> i = entrySet().iterator();
+
+		while( n-- != 0 ) h += i.next().hashCode();
+		return h;
+	}
+
+	public boolean equals(Object o) {
+		if ( o == this ) return true;
+		if ( ! ( o instanceof Map ) ) return false;
+
+		Map<?,?> m = (Map<?,?>)o; 
+		if ( m.size() != size() ) return false; 
+		return entrySet().containsAll( m.entrySet() ); 
+	}
+
+
+	public String toString() {
+		final StringBuilder s = new StringBuilder();
+		final ObjectIterator<? extends Map.Entry<KEY_GENERIC_CLASS,VALUE_GENERIC_CLASS>> i = entrySet().iterator();
+		int n = size();
+		MAP.Entry KEY_VALUE_GENERIC e;
+		boolean first = true;
+
+		s.append("{");
+
+		while(n-- != 0) {
+			if (first) first = false;
+			else s.append(", ");
+
+			e = (MAP.Entry KEY_VALUE_GENERIC)i.next();
+
+#if #keys(reference)
+			if (this == e.getKey()) s.append("(this map)"); else
+#endif
+				s.append(String.valueOf(e.ENTRY_GET_KEY()));
+			s.append("=>");
+#if #values(reference)
+			if (this == e.getValue()) s.append("(this map)"); else
+#endif
+				s.append(String.valueOf(e.ENTRY_GET_VALUE()));
+		}
+
+		s.append("}");
+		return s.toString();
+	}
+	 
+
+}
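
A small sketch of the view semantics documented above: keySet() and values() are live views backed by the map, and containsValue() is defined through values().contains(). The names Int2IntOpenHashMap and IntSet are assumed expansions for the int/int specialization:

	import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap;
	import it.unimi.dsi.fastutil.ints.IntSet;

	public class AbstractMapSketch {
		public static void main( String[] args ) {
			Int2IntOpenHashMap m = new Int2IntOpenHashMap();
			m.put( 1, 10 );
			m.put( 2, 20 );
			IntSet keys = m.keySet();                    // a view, not a copy
			System.out.println( keys.contains( 2 ) );    // true
			m.remove( 2 );
			System.out.println( keys.contains( 2 ) );    // false: the view tracks the map
			System.out.println( m.containsValue( 10 ) ); // true, via values().contains()
		}
	}
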
diff --git a/drv/AbstractPriorityQueue.drv b/drv/AbstractPriorityQueue.drv
new file mode 100644
index 0000000..df809eb
--- /dev/null
+++ b/drv/AbstractPriorityQueue.drv
@@ -0,0 +1,42 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.AbstractPriorityQueue;
+
+/**  An abstract class providing basic methods for priority queues implementing a type-specific interface.
+ *
+ */
+
+public abstract class ABSTRACT_PRIORITY_QUEUE KEY_GENERIC extends AbstractPriorityQueue<KEY_GENERIC_CLASS> implements PRIORITY_QUEUE KEY_GENERIC  {
+
+	/** Delegates to the corresponding type-specific method. */
+	public void enqueue( final KEY_GENERIC_CLASS x ) { enqueue( x.KEY_VALUE() ); }
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_GENERIC_CLASS dequeue() { return KEY2OBJ( DEQUEUE() ); }
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_GENERIC_CLASS first() { return KEY2OBJ( FIRST() ); }
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_GENERIC_CLASS last() { return KEY2OBJ( LAST() ); }
+
+	/** Throws an {@link UnsupportedOperationException}. */
+	public KEY_TYPE LAST() { throw new UnsupportedOperationException(); }
+}
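
A sketch of the boxing/unboxing delegation above: the Object-based methods forward to the type-specific ones, so both calling styles can be mixed. IntHeapPriorityQueue and firstInt()/dequeueInt() are assumed names for the int specialization:

	import it.unimi.dsi.fastutil.ints.IntHeapPriorityQueue;

	public class PriorityQueueSketch {
		public static void main( String[] args ) {
			IntHeapPriorityQueue q = new IntHeapPriorityQueue(); // natural order
			q.enqueue( 3 );                       // type-specific method
			q.enqueue( Integer.valueOf( 1 ) );    // Object method, delegates to enqueue(int)
			System.out.println( q.firstInt() );   // 1
			System.out.println( q.dequeueInt() ); // 1
			System.out.println( q.dequeue() );    // 3, boxed by the delegating method
		}
	}
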
diff --git a/drv/AbstractSet.drv b/drv/AbstractSet.drv
new file mode 100644
index 0000000..298af84
--- /dev/null
+++ b/drv/AbstractSet.drv
@@ -0,0 +1,82 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.Set;
+
+/**  An abstract class providing basic methods for sets implementing a type-specific interface. */
+
+public abstract class ABSTRACT_SET KEY_GENERIC extends ABSTRACT_COLLECTION KEY_GENERIC implements Cloneable, SET KEY_GENERIC {
+	 
+	protected ABSTRACT_SET() {}
+
+	public abstract KEY_ITERATOR KEY_GENERIC iterator();
+
+	public boolean equals( final Object o ) {
+		if ( o == this ) return true;
+		if ( !( o instanceof Set ) ) return false;
+
+		Set<?> s = (Set<?>) o;
+		if ( s.size() != size() ) return false;
+		return containsAll(s);
+	}
+
+
+	/** Returns a hash code for this set.
+	 *
+	 * The hash code of a set is computed by summing the hash codes of
+	 * its elements.
+	 *
+	 * @return a hash code for this set.
+	 */
+
+	public int hashCode() {
+		int h = 0, n = size();
+		KEY_ITERATOR KEY_GENERIC i = iterator();
+		KEY_GENERIC_TYPE k;
+
+		while( n-- != 0 ) {
+			k = i.NEXT_KEY(); // We need k because KEY2JAVAHASH() is a macro with repeated evaluation.
+			h += KEY2JAVAHASH( k );
+		}
+		return h;
+	}
+
+
+	public boolean remove( KEY_TYPE k ) {
+		throw new UnsupportedOperationException();
+	}
+
+#if #keys(primitive)
+
+	/** Delegates to <code>remove()</code>.
+	 *
+	 * @param k the element to be removed.
+	 * @return true if the set was modified.
+	 */
+	public boolean rem( KEY_TYPE k ) {
+		return remove( k );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public boolean remove( final Object o ) {
+		return remove( KEY_OBJ2TYPE( o ) );
+	}
+
+#endif
+}
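
The hash code defined above (the sum of the element hash codes) matches the java.util.Set contract, so type-specific and standard sets can be compared freely. A small sketch, assuming IntOpenHashSet is the generated int specialization:

	import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
	import java.util.Arrays;
	import java.util.HashSet;

	public class AbstractSetSketch {
		public static void main( String[] args ) {
			IntOpenHashSet s = new IntOpenHashSet( new int[] { 1, 2, 3 } );
			// For int keys the element hash is the value itself, so the set hash is 1 + 2 + 3.
			System.out.println( s.hashCode() ); // 6
			System.out.println( s.equals( new HashSet<Integer>( Arrays.asList( 1, 2, 3 ) ) ) ); // true
		}
	}
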
diff --git a/drv/AbstractSortedMap.drv b/drv/AbstractSortedMap.drv
new file mode 100644
index 0000000..83de345
--- /dev/null
+++ b/drv/AbstractSortedMap.drv
@@ -0,0 +1,174 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import VALUE_PACKAGE.VALUE_COLLECTION;
+import VALUE_PACKAGE.VALUE_ABSTRACT_COLLECTION;
+import VALUE_PACKAGE.VALUE_ABSTRACT_ITERATOR;
+import VALUE_PACKAGE.VALUE_ITERATOR;
+import it.unimi.dsi.fastutil.objects.ObjectBidirectionalIterator;
+import it.unimi.dsi.fastutil.objects.ObjectSortedSet;
+import java.util.Map;
+
+#if #keys(reference)
+import java.util.Comparator;
+#endif
+
+/** An abstract class providing basic methods for sorted maps implementing a type-specific interface. */
+
+public abstract class ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC extends ABSTRACT_MAP KEY_VALUE_GENERIC implements SORTED_MAP KEY_VALUE_GENERIC {
+
+	private static final long serialVersionUID = -1773560792952436569L;
+
+	protected ABSTRACT_SORTED_MAP() {}
+
+#if #keys(primitive)
+	/** Delegates to the corresponding type-specific method. */
+	public SORTED_MAP KEY_VALUE_GENERIC headMap( final KEY_GENERIC_CLASS to  ) {
+		return headMap( KEY_CLASS2TYPE( to ) );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public SORTED_MAP KEY_VALUE_GENERIC tailMap( final KEY_GENERIC_CLASS from ) {
+		return tailMap( KEY_CLASS2TYPE( from ) );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public SORTED_MAP KEY_VALUE_GENERIC subMap( final KEY_GENERIC_CLASS from, final KEY_GENERIC_CLASS to ) {
+		return subMap( KEY_CLASS2TYPE( from ), KEY_CLASS2TYPE( to ) );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_GENERIC_CLASS firstKey() {
+		return KEY2OBJ( FIRST_KEY() );
+	}
+	 
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_GENERIC_CLASS lastKey() {
+		return KEY2OBJ( LAST_KEY() );
+	}
+#endif
+
+
+	/** Returns a type-specific-sorted-set view of the keys of this map.
+	 *
+	 * <P>The view is backed by the sorted set returned by {@link #entrySet()}. Note that
+	 * <em>no attempt is made at caching the result of this method</em>, as this would
+	 * require adding some attributes that lightweight implementations would
+	 * not need. Subclasses may easily override this policy by calling
+	 * this method and caching the result, but implementors are encouraged to
+	 * write more efficient ad-hoc implementations.
+	 *
+	 * @return a sorted set view of the keys of this map; it may be safely cast to a type-specific interface.
+	 */
+
+
+	public SORTED_SET KEY_GENERIC keySet() {
+		return new KeySet();
+	}
+	
+	/** A wrapper exhibiting the keys of a map. */
+
+	protected class KeySet extends ABSTRACT_SORTED_SET KEY_GENERIC {
+		public boolean contains( final KEY_TYPE k ) { return containsKey( k ); }
+		public int size() { return ABSTRACT_SORTED_MAP.this.size(); }
+		public void clear() { ABSTRACT_SORTED_MAP.this.clear(); }
+
+		public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return ABSTRACT_SORTED_MAP.this.comparator(); }
+
+		public KEY_GENERIC_TYPE FIRST() { return FIRST_KEY(); }
+		public KEY_GENERIC_TYPE LAST() { return LAST_KEY(); }
+
+		public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_TYPE to  ) { return headMap( to ).keySet(); }
+		public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_TYPE from ) { return tailMap( from ).keySet(); }
+		public SORTED_SET KEY_GENERIC subSet( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) { return subMap( from, to ).keySet(); }
+
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return new KeySetIterator KEY_VALUE_GENERIC( entrySet().iterator( new BasicEntry KEY_VALUE_GENERIC( from, VALUE_NULL ) ) ); }
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return new KeySetIterator KEY_VALUE_GENERIC( entrySet().iterator() ); }
+
+
+	}
+	/** A wrapper exhibiting a map iterator as an iterator on keys.
+	 *
+	 * <P>To provide an iterator on keys, just create an instance of this
+	 * class using the corresponding iterator on entries.
+	 */
+
+	protected static class KeySetIterator KEY_VALUE_GENERIC extends KEY_ABSTRACT_BIDI_ITERATOR KEY_GENERIC {
+		protected final ObjectBidirectionalIterator<Map.Entry <KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>> i;
+
+		public KeySetIterator( ObjectBidirectionalIterator<Map.Entry <KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>> i ) {
+			this.i = i;
+		}
+
+		public KEY_GENERIC_TYPE NEXT_KEY() { return KEY_CLASS2TYPE( i.next().getKey() ); };
+		public KEY_GENERIC_TYPE PREV_KEY() { return KEY_CLASS2TYPE( i.previous().getKey() ); };
+
+		public boolean hasNext() { return i.hasNext(); }
+		public boolean hasPrevious() { return i.hasPrevious(); }
+	}
+	
+
+
+	/** Returns a type-specific collection view of the values contained in this map.
+	 *
+	 * <P>The view is backed by the sorted set returned by {@link #entrySet()}. Note that
+	 * <em>no attempt is made at caching the result of this method</em>, as this would
+	 * require adding some attributes that lightweight implementations would
+	 * not need. Subclasses may easily override this policy by calling
+	 * this method and caching the result, but implementors are encouraged to
+	 * write more efficient ad-hoc implementations.
+	 *
+	 * @return a type-specific collection view of the values contained in this map.
+	 */
+
+	public VALUE_COLLECTION VALUE_GENERIC values() {
+		return new ValuesCollection();
+	}
+
+	/** A wrapper exhibiting the values of a map. */
+	protected class ValuesCollection extends VALUE_ABSTRACT_COLLECTION VALUE_GENERIC {
+		public VALUE_ITERATOR VALUE_GENERIC iterator() { return new ValuesIterator KEY_VALUE_GENERIC( entrySet().iterator() ); }
+		public boolean contains( final VALUE_TYPE k ) { return containsValue( k ); } 
+		public int size() { return ABSTRACT_SORTED_MAP.this.size(); }
+		public void clear() { ABSTRACT_SORTED_MAP.this.clear(); }
+
+	}
+
+	/** A wrapper exhibiting a map iterator as an iterator on values.
+	 *
+	 * <P>To provide an iterator on values, just create an instance of this
+	 * class using the corresponding iterator on entries.
+	 */
+
+	protected static class ValuesIterator KEY_VALUE_GENERIC extends VALUE_ABSTRACT_ITERATOR VALUE_GENERIC {
+		protected final ObjectBidirectionalIterator<Map.Entry <KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>> i;
+
+		public ValuesIterator( ObjectBidirectionalIterator<Map.Entry <KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>> i ) {
+			this.i = i;
+		}
+
+		public VALUE_GENERIC_TYPE NEXT_VALUE() { return VALUE_CLASS2TYPE( i.next().getValue() ); };
+		public boolean hasNext() { return i.hasNext(); }
+	}
+
+	@SuppressWarnings({ "unchecked", "rawtypes" })
+	public ObjectSortedSet<Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>> entrySet() {
+		return (ObjectSortedSet)ENTRYSET();
+	}
+}
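
A sketch of the sorted views above, assuming Int2IntAVLTreeMap (generated from the AVLTreeMap driver in this commit) and IntSortedSet as the int specialization names:

	import it.unimi.dsi.fastutil.ints.Int2IntAVLTreeMap;
	import it.unimi.dsi.fastutil.ints.IntSortedSet;

	public class SortedMapSketch {
		public static void main( String[] args ) {
			Int2IntAVLTreeMap m = new Int2IntAVLTreeMap();
			m.put( 3, 30 );
			m.put( 1, 10 );
			m.put( 2, 20 );
			IntSortedSet keys = m.keySet();        // sorted view backed by entrySet()
			System.out.println( keys.firstInt() ); // 1
			System.out.println( keys.lastInt() );  // 3
			System.out.println( m.headMap( 3 ).lastIntKey() ); // 2
		}
	}
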
diff --git a/drv/AbstractSortedSet.drv b/drv/AbstractSortedSet.drv
new file mode 100644
index 0000000..90c2c34
--- /dev/null
+++ b/drv/AbstractSortedSet.drv
@@ -0,0 +1,61 @@
+/*		 
+ * Copyright (C) 2003-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+/** An abstract class providing basic methods for sorted sets implementing a type-specific interface. */
+
+public abstract class ABSTRACT_SORTED_SET KEY_GENERIC extends ABSTRACT_SET KEY_GENERIC implements SORTED_SET KEY_GENERIC {
+
+	protected ABSTRACT_SORTED_SET() {}
+
+#if #keys(primitive)
+	/** Delegates to the corresponding type-specific method. */
+	public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_CLASS to  ) {
+		return headSet( to.KEY_VALUE() );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_CLASS from ) {
+		return tailSet( from.KEY_VALUE() );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public SORTED_SET KEY_GENERIC subSet( final KEY_GENERIC_CLASS from, final KEY_GENERIC_CLASS to ) {
+		return subSet( from.KEY_VALUE(), to.KEY_VALUE() );
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_GENERIC_CLASS first() {
+		return KEY2OBJ( FIRST() );
+	}
+	 
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_GENERIC_CLASS last() {
+		return KEY2OBJ( LAST() );
+	}
+#endif
+
+	/** Delegates to the new covariantly stronger generic method. */
+	
+	@Deprecated
+	public KEY_BIDI_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() {
+		return iterator();
+	}
+
+	public abstract KEY_BIDI_ITERATOR KEY_GENERIC iterator();
+}
diff --git a/drv/AbstractStack.drv b/drv/AbstractStack.drv
new file mode 100644
index 0000000..6007e23
--- /dev/null
+++ b/drv/AbstractStack.drv
@@ -0,0 +1,72 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.AbstractStack;
+
+/** An abstract class providing basic methods for implementing a type-specific stack interface.
+ *
+ * <P>To create a type-specific stack, you need both object methods and
+ * primitive-type methods. However, if you inherit from this class you need
+ * just one of the two (either version will do).
+ */
+
+public abstract class ABSTRACT_STACK KEY_GENERIC extends AbstractStack<KEY_GENERIC_CLASS> implements STACK KEY_GENERIC {
+
+	protected ABSTRACT_STACK() {}
+
+	/** Delegates to the corresponding type-specific method. */
+	public void push( KEY_GENERIC_CLASS o ) {
+		push( o.KEY_VALUE() ); 
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_GENERIC_CLASS pop() {
+		return KEY_CLASS.valueOf( POP() ); 
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_GENERIC_CLASS top() {
+		return KEY_CLASS.valueOf( TOP() ); 
+	}
+
+	/** Delegates to the corresponding type-specific method. */
+	public KEY_GENERIC_CLASS peek( int i ) {
+		return KEY_CLASS.valueOf( PEEK( i ) ); 
+	}
+
+	/** Delegates to the corresponding generic method. */
+	public void push( KEY_TYPE k ) {
+		push( KEY_CLASS.valueOf( k ) ); 
+	}
+
+	/** Delegates to the corresponding generic method. */
+	public KEY_TYPE POP() {
+		return pop().KEY_VALUE(); 
+	}
+
+	/** Delegates to the corresponding generic method. */
+	public KEY_TYPE TOP() {
+		return top().KEY_VALUE(); 
+	}
+
+	/** Delegates to the corresponding generic method. */
+	public KEY_TYPE PEEK( int i ) {
+		return peek( i ).KEY_VALUE();
+	}
+}
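
The remark about needing just one of the two versions follows from the mutual delegation above: each Object method calls its primitive counterpart and vice versa, so a subclass only has to override one member of each pair. A miniature of the same pattern in plain Java (not fastutil code):

	// Each pair delegates to its counterpart; overriding either member breaks the cycle.
	abstract class BoxingStack {
		public void push( Integer o ) { push( o.intValue() ); }     // Object -> primitive
		public void push( int k ) { push( Integer.valueOf( k ) ); } // primitive -> Object
		public Integer pop() { return Integer.valueOf( popInt() ); }
		public int popInt() { return pop().intValue(); }
	}

	class IntOnlyStack extends BoxingStack {
		private final java.util.ArrayDeque<Integer> d = new java.util.ArrayDeque<Integer>();
		// Overriding only the primitive versions suffices; the Object versions delegate here.
		@Override public void push( int k ) { d.push( k ); }
		@Override public int popInt() { return d.pop(); }
	}
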
diff --git a/drv/ArrayFIFOQueue.drv b/drv/ArrayFIFOQueue.drv
new file mode 100644
index 0000000..9054b7c
--- /dev/null
+++ b/drv/ArrayFIFOQueue.drv
@@ -0,0 +1,206 @@
+/*		 
+ * Copyright (C) 2010-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+#if #keyclass(Object)
+import java.util.Comparator;
+
+import it.unimi.dsi.fastutil.AbstractPriorityQueue;
+#endif
+
+import it.unimi.dsi.fastutil.Arrays;
+
+import java.util.NoSuchElementException;
+
+/** A type-specific array-based FIFO queue, also supporting deque operations.
+ *
+ * <P>Instances of this class represent a FIFO queue using a backing
+ * array in a circular way. The array is enlarged and shrunk as needed. You can use the {@link #trim()} method
+ * to reduce its memory usage, if necessary.
+ *
+ * <P>This class provides additional methods that implement a <em>deque</em> (double-ended queue).
+ */
+
+public class ARRAY_FIFO_QUEUE KEY_GENERIC extends ABSTRACT_PRIORITY_QUEUE KEY_GENERIC {
+
+	/** The standard initial capacity of a queue. */
+	public final static int INITIAL_CAPACITY = 4;
+
+	/** The backing array. */
+	@SuppressWarnings("unchecked")
+	protected KEY_GENERIC_TYPE array[] = KEY_GENERIC_ARRAY_CAST ARRAYS.EMPTY_ARRAY;
+
+	/** The current (cached) length of {@link #array}. */
+	protected int length;
+	
+	/** The start position in {@link #array}. It is always strictly smaller than {@link #length}.*/
+	protected int start;
+	
+	/** The end position in {@link #array}. It is always strictly smaller than {@link #length}.
+	 *  It might actually be smaller than {@link #start} because {@link #array} is used cyclically. */
+	protected int end;
+	
+	/** Creates a new empty queue with given capacity.
+	 *
+	 * @param capacity the initial capacity of this queue.
+	 */
+	@SuppressWarnings("unchecked")
+	public ARRAY_FIFO_QUEUE( final int capacity ) {
+		if ( capacity < 0 ) throw new IllegalArgumentException( "Initial capacity (" + capacity + ") is negative" );
+		array = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ capacity ];
+		length = capacity;
+	}
+
+
+	/** Creates a new empty queue with standard {@linkplain #INITIAL_CAPACITY initial capacity}.
+	 */
+	public ARRAY_FIFO_QUEUE() {
+		this( INITIAL_CAPACITY );
+	}
+
+	/** Returns <code>null</code> (FIFO queues have no comparator). 
+	 * @return <code>null</code>.
+	 */ 
+	@Override
+	public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() {
+		return null;
+	}
+
+	@Override
+	public KEY_GENERIC_TYPE DEQUEUE() {
+		if ( start == end ) throw new NoSuchElementException();
+		final KEY_GENERIC_TYPE t = array[ start ];
+#if #keys(reference)
+		array[ start ] = null; // Clean-up for the garbage collector.
+#endif
+		if ( ++start == length ) start = 0;
+		reduce();
+		return t;
+	}
+
+	/** Dequeues the {@linkplain #last() last} element from the queue.
+	 *
+	 * @return the dequeued element. 
+	 * @throws NoSuchElementException if the queue is empty.
+	 */
+	public KEY_GENERIC_TYPE DEQUEUE_LAST() {
+		if ( start == end ) throw new NoSuchElementException();
+		if ( end == 0 ) end = length;
+		final KEY_GENERIC_TYPE t = array[ --end ];
+#if #keys(reference)
+		array[ end ] = null; // Clean-up for the garbage collector.
+#endif
+		reduce();
+		return t;
+	}
+
+	@SuppressWarnings("unchecked")
+	private final void resize( final int size, final int newLength ) {
+		final KEY_GENERIC_TYPE[] newArray = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ newLength ];
+		if ( start >= end ) {
+			if ( size != 0 ) {
+				System.arraycopy( array, start, newArray, 0, length - start );
+				System.arraycopy( array, 0, newArray, length - start, end );
+			}
+		}
+		else System.arraycopy( array, start, newArray, 0, end - start );
+		start = 0;
+		end = size;
+		array = newArray;
+		length = newLength;
+	}	
+
+	private final void expand() {
+		resize( length, (int)Math.min( Arrays.MAX_ARRAY_SIZE, 2L * length ) );
+	}
+
+	private final void reduce() {
+		final int size = size();
+		if ( length > INITIAL_CAPACITY && size <= length / 4 ) resize( size, (int)(( length + 1L ) / 2) ); // The +1 turns Integer.MAX_VALUE into 2^30 (1 << 30).
+	}
+
+	@Override
+	public void enqueue( KEY_GENERIC_TYPE x ) {
+		array[ end++ ] = x;
+		if ( end == length ) end = 0;
+		if ( end == start ) expand();
+	}
+
+	/** Enqueues a new element as the {@linkplain #first() first} element (in dequeuing order) of the queue.
+	 */
+	public void enqueueFirst( KEY_GENERIC_TYPE x ) {
+		if ( start == 0 ) start = length;
+		array[ --start ] = x;
+		if ( end == start ) expand();
+	}
+
+	/** Returns the first element of the queue. 
+	 * @return the first element of the queue.	
+	 */
+	public KEY_GENERIC_TYPE FIRST() {
+		if ( start == end ) throw new NoSuchElementException();
+		return array[ start ];
+	}
+
+
+	/** Returns the last element of the queue. 
+	 * @return the last element of the queue.	
+	 */
+	public KEY_GENERIC_TYPE LAST() {
+		if ( start == end ) throw new NoSuchElementException();
+		return array[ ( end == 0 ? length : end ) - 1 ];
+	}
+
+	@Override
+	public void clear() {
+#if #keys(reference)
+		if ( start <= end ) ObjectArrays.fill( array, start, end, null );
+		else {
+			ObjectArrays.fill( array, start, length, null );
+			ObjectArrays.fill( array, 0, end, null );
+		}
+#endif
+		start = end = 0;
+	}
+
+	/** Trims the queue to the smallest possible size. */		
+	@SuppressWarnings("unchecked")
+	public void trim() {
+		final int size = size();
+		final KEY_GENERIC_TYPE[] newArray = 
+#if #keys(primitive)
+											new KEY_GENERIC_TYPE[ size + 1 ];
+#else
+											(KEY_GENERIC_TYPE[])new Object[ size + 1 ];
+#endif
+		if ( start <= end ) System.arraycopy( array, start, newArray, 0, end - start );
+		else {
+			System.arraycopy( array, start, newArray, 0, length - start );
+			System.arraycopy( array, 0, newArray, length - start, end );
+		}
+		start = 0;
+		length = ( end = size ) + 1;
+		array = newArray;
+	}
+		
+	@Override
+	public int size() {
+		final int apparentLength = end - start;
+		return apparentLength >= 0 ? apparentLength : length + apparentLength;
+	}
+}
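
A usage sketch of the circular deque behaviour described above, assuming the int specialization is generated as IntArrayFIFOQueue with dequeueInt()/dequeueLastInt() as the expanded method names:

	import it.unimi.dsi.fastutil.ints.IntArrayFIFOQueue;

	public class FifoSketch {
		public static void main( String[] args ) {
			IntArrayFIFOQueue q = new IntArrayFIFOQueue();
			q.enqueue( 1 );
			q.enqueue( 2 );
			q.enqueueFirst( 0 );                      // deque operation: add at the head
			System.out.println( q.dequeueInt() );     // 0, FIFO order from the head
			System.out.println( q.dequeueLastInt() ); // 2, deque operation at the tail
			System.out.println( q.size() );           // 1
			q.trim();                                 // shrink the circular backing array
		}
	}
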
diff --git a/drv/ArrayFrontCodedList.drv b/drv/ArrayFrontCodedList.drv
new file mode 100644
index 0000000..5478f03
--- /dev/null
+++ b/drv/ArrayFrontCodedList.drv
@@ -0,0 +1,712 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.objects.AbstractObjectListIterator;
+import it.unimi.dsi.fastutil.objects.AbstractObjectList;
+import it.unimi.dsi.fastutil.objects.ObjectListIterator;
+import it.unimi.dsi.fastutil.longs.LongArrays;
+
+import java.io.Serializable;
+import java.util.Iterator;
+import java.util.Collection;
+import java.util.NoSuchElementException;
+
+/** Compact storage of lists of arrays using front coding.
+ * 
+ * <P>This class stores a list of arrays immutably in a single large array
+ * using front coding (of course, the compression will be reasonable only if
+ * the list is sorted lexicographically; see below). It implements an
+ * immutable type-specific list that returns the <var>i</var>-th array when
+ * calling {@link #get(int) get(<var>i</var>)}. The returned array may be
+ * freely modified.
+ *
+ * <P>Front coding is based on the idea that if the <var>i</var>-th and the
+ * (<var>i</var>+1)-th array have a common prefix, we might store the length
+ * of the common prefix, and then the rest of the second array.
+ *
+ * <P>This approach, of course, requires that once in a while an array is
+ * stored entirely.  The <def>ratio</def> of a front-coded list defines how
+ * often this happens (once every {@link #ratio()} arrays). A higher ratio
+ * means more compression, but also a longer access time, as more arrays
+ * have to be probed to build the result. Note that we must build an array
+ * every time {@link #get(int)} is called, but this class also provides methods
+ * that extract one of the stored arrays into a given array, reducing garbage
+ * collection. See the documentation of the family of <code>get()</code>
+ * methods.
+ *
+ * <P>By setting the ratio to 1 we actually disable front coding: however, we
+ * still have a data structure storing a large list of arrays with a reduced
+ * overhead (just one integer per array, plus the space required for lengths).
+ *
+ * <P>Note that front-coded lists are typically used in the form of
+ * serialized objects; usually, the data that has to be compacted is processed
+ * offline, and the resulting structure is stored permanently. Since the
+ * pointer array is not stored, the serialized format is very small.
+ *
+ * <H2>Implementation Details</H2>
+ * 
+ * <P>All arrays are stored in a {@linkplain it.unimi.dsi.fastutil.BigArrays big array}. A separate array of pointers
+ * indexes arrays whose position is a multiple of the ratio: thus, a higher ratio
+ * also means fewer pointers.
+ * 
+ * <P>More in detail, an array whose position is a multiple of the ratio is
+ * stored as the array length, followed by the elements of the array. The array
+ * length is coded by a simple variable-length list of <var>k</var>-1 bit
+ * blocks, where <var>k</var> is the number of bits of the underlying primitive
+ * type. All other arrays are stored as follows: let <code>common</code> be the
+ * length of the maximum common prefix between the array and its predecessor.
+ * Then we store the array length decremented by <code>common</code>, followed
+ * by <code>common</code>, followed by the array elements whose index is
+ * greater than or equal to <code>common</code>. For instance, if we store
+ * <samp>foo</samp>, <samp>foobar</samp>, <samp>football</samp> and
+ * <samp>fool</samp> in a front-coded character-array list with ratio 3, the
+ * character array will contain
+ *
+ * <pre>
+ * <b>3</b> f o o <b>3</b> <b>3</b> b a r <b>5</b> <b>3</b> t b a l l <b>4</b> f o o l 
+ * </pre>
+ */
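
The worked example above can be reproduced through one of the generated specializations; a brief sketch, assuming the char version of this driver is generated as CharArrayFrontCodedList:

	import it.unimi.dsi.fastutil.chars.CharArrayFrontCodedList;
	import java.util.Arrays;

	public class FrontCodingSketch {
		public static void main( String[] args ) {
			CharArrayFrontCodedList l = new CharArrayFrontCodedList(
				Arrays.asList( "foo".toCharArray(), "foobar".toCharArray(),
				               "football".toCharArray(), "fool".toCharArray() ), 3 );
			System.out.println( new String( l.get( 2 ) ) ); // football, rebuilt from the coded data
			System.out.println( l.arrayLength( 1 ) );       // 6, the length of "foobar"
			System.out.println( l.ratio() );                // 3
		}
	}
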
+
+public class ARRAY_FRONT_CODED_LIST extends AbstractObjectList<KEY_TYPE[]> implements Serializable, Cloneable {
+
+	private static final long serialVersionUID = 1L;
+
+	/** The number of arrays in the list. */
+	protected final int n;
+	/** The ratio of this front-coded list. */
+	protected final int ratio;
+	/** The big array containing the compressed arrays. */
+	protected final KEY_TYPE[][] array;
+	/** The pointers to entire arrays in the list. */
+	protected transient long[] p;
+
+	/** Creates a new front-coded list containing the arrays returned by the given iterator.
+	 * 
+	 * @param arrays an iterator returning arrays.
+	 * @param ratio the desired ratio.
+	 */
+
+	public ARRAY_FRONT_CODED_LIST( final Iterator<KEY_TYPE[]> arrays, final int ratio ) {
+
+		if ( ratio < 1 ) throw new IllegalArgumentException( "Illegal ratio (" + ratio + ")" );
+
+		KEY_TYPE[][] array = BIG_ARRAYS.EMPTY_BIG_ARRAY;
+		long[] p = LongArrays.EMPTY_ARRAY;
+
+		KEY_TYPE[][] a = new KEY_TYPE[ 2 ][];
+		long curSize = 0;
+		int n = 0, b = 0, common, length, minLength;
+		
+		while( arrays.hasNext() ) {
+			a[ b ] = arrays.next();
+			length = a[ b ].length;
+			
+			if ( n % ratio == 0 ) {
+				p = LongArrays.grow( p, n / ratio + 1 );
+				p[ n / ratio ] = curSize;
+
+				array = BIG_ARRAYS.grow( array, curSize + count( length ) + length, curSize );
+				curSize += writeInt( array, length, curSize );
+				BIG_ARRAYS.copyToBig( a[ b ], 0, array, curSize, length );
+				curSize += length;
+			}
+			else {
+				minLength = a[ 1 - b ].length;
+				if ( length < minLength ) minLength = length;
+				for( common = 0; common < minLength; common++ ) if ( a[ 0 ][ common ] != a[ 1 ][ common ] ) break;
+				length -= common;
+
+				array = BIG_ARRAYS.grow( array, curSize + count( length ) + count( common ) + length, curSize );
+				curSize += writeInt( array, length, curSize );
+				curSize += writeInt( array, common, curSize );
+				BIG_ARRAYS.copyToBig( a[ b ], common, array, curSize, length );
+				curSize += length;
+			}
+
+			b = 1 - b;
+			n++;
+		}
+
+		this.n = n;
+		this.ratio = ratio;
+		this.array = BIG_ARRAYS.trim( array, curSize );
+		this.p = LongArrays.trim( p, ( n + ratio - 1 ) / ratio );
+
+	}
+
+	/** Creates a new front-coded list containing the arrays in the given collection.
+	 * 
+	 * @param c a collection containing arrays.
+	 * @param ratio the desired ratio.
+	 */
+
+	public ARRAY_FRONT_CODED_LIST( final Collection<KEY_TYPE[]> c, final int ratio ) {
+		this( c.iterator(), ratio );
+	}
+
+
+
+	/* The following (rather messy) methods implement the encoding of arbitrary integers inside a big array.
+	 * Unfortunately, we have to specify different codes for almost every type. */
+
+	/** Reads a coded length.
+	 * @param a the data big array.
+	 * @param pos the starting position.
+	 * @return the length coded at <code>pos</code>.
+	 */
+	private static int readInt( final KEY_TYPE a[][], long pos ) {
+#if #keyclass(Integer)
+		return IntBigArrays.get( a, pos );
+#elif #keyclass(Long)
+		return (int)LongBigArrays.get( a, pos );
+#elif #keyclass(Character)
+		final char c0 = CharBigArrays.get( a, pos );
+		return c0  < 0x8000 ? c0 : ( c0 & 0x7FFF ) << 16 | CharBigArrays.get( a, pos + 1 );
+#elif #keyclass(Short)
+		final short s0 = ShortBigArrays.get( a, pos );
+		return s0 >= 0 ? s0 : s0 << 16 | ( ShortBigArrays.get( a, pos + 1 ) & 0xFFFF );
+#else
+		final byte b0 = ByteBigArrays.get( a, pos );
+		if ( b0  >= 0 ) return b0;
+		final byte b1 = ByteBigArrays.get( a, pos + 1 );
+		if ( b1 >= 0 ) return ( - b0 - 1 ) << 7 | b1;
+		final byte b2 = ByteBigArrays.get( a, pos + 2 );
+		if ( b2 >= 0 ) return ( - b0 - 1 ) << 14 | ( - b1 - 1 ) << 7 | b2;
+		final byte b3 = ByteBigArrays.get( a, pos + 3 );
+		if ( b3 >= 0 ) return ( - b0 - 1 ) << 21 | ( - b1 - 1 ) << 14 | ( - b2 - 1 ) << 7 | b3;
+		return ( - b0 - 1 ) << 28 | ( - b1 - 1 ) << 21 | ( - b2 - 1 ) << 14 | ( - b3 - 1 ) << 7 | ByteBigArrays.get( a, pos + 4 );
+#endif
+	}
+
+	/** Computes the number of elements coding a given length.
+	 * @param length the length to be coded.
+	 * @return the number of elements coding <code>length</code>.
+	 */
+	@SuppressWarnings("unused")
+	private static int count( final int length ) {
+#if #keyclass(Integer) || #keyclass(Long)
+		return 1;
+#elif #keyclass(Character) || #keyclass(Short)
+		return length < ( 1 << 15 ) ? 1 : 2;
+#else
+		if ( length < ( 1 << 7 ) ) return 1;
+		if ( length < ( 1 << 14 ) ) return 2;
+		if ( length < ( 1 << 21 ) ) return 3;
+		if ( length < ( 1 << 28 ) ) return 4;
+		return 5;
+#endif
+	}
+
+	/** Writes a length.
+	 * @param a the data array.
+	 * @param length the length to be written.
+	 * @param pos the starting position.
+	 * @return the number of elements coding <code>length</code>.
+	 */
+	private static int writeInt( final KEY_TYPE a[][], int length, long pos ) {
+#if #keyclass(Long)
+		LongBigArrays.set( a, pos, length );
+		return 1;
+#elif #keyclass(Integer)
+		IntBigArrays.set( a, pos, length );
+		return 1;
+#elif #keyclass(Character)
+		if ( length < ( 1 << 15 ) ) {
+			CharBigArrays.set( a, pos, (char)length );
+			return 1;
+		}
+		CharBigArrays.set( a, pos++, (char)( length >>> 16 | 0x8000 ) );
+		CharBigArrays.set( a, pos, (char)( length & 0xFFFF ) );
+		return 2;
+#elif #keyclass(Short)
+		if ( length < ( 1 << 15 ) ) {
+			ShortBigArrays.set( a, pos, (short)length );
+			return 1;
+		}
+		ShortBigArrays.set( a, pos++, (short)( - ( length >>> 16 ) - 1 ) );
+		ShortBigArrays.set( a, pos, (short)( length & 0xFFFF ) );
+		return 2;
+#else
+		final int count = count( length );
+		ByteBigArrays.set( a, pos + count - 1, (byte)( length & 0x7F ) );
+
+		if ( count != 1 ) {
+			int i = count - 1;
+			while( i-- != 0 ) {
+				length >>>= 7;
+				ByteBigArrays.set( a, pos + i, (byte)( - ( length  & 0x7F ) - 1 ) );
+			}
+		}
+
+		return count;
+#endif
+	}
+
+
+
+	/** Returns the ratio of this list.
+	 *
+	 * @return the ratio of this list.
+	 */
+
+	public int ratio() {
+		return ratio;
+	}
+
+
+	/** Computes the length of the array at the given index.
+	 *
+	 * <P>This private version of {@link #arrayLength(int)} does not check its argument.
+	 *
+	 * @param index an index.
+	 * @return the length of the <code>index</code>-th array.
+	 */
+	private int length( final int index ) {
+		final KEY_TYPE[][] array = this.array;
+		final int delta = index % ratio; // The index into the p array, and the delta inside the block.
+
+		long pos = p[ index / ratio ]; // The position into the array of the first entire word before the index-th.
+		int length = readInt( array, pos );
+
+		if ( delta == 0 ) return length;
+		
+		// First of all, we recover the array length and the maximum amount of copied elements.
+		int common;
+		pos += count( length ) + length;
+		length = readInt( array, pos );
+		common = readInt( array, pos + count( length ) );
+
+		for( int i = 0; i < delta - 1; i++ ) {
+			pos += count( length ) + count( common ) + length;
+			length = readInt( array, pos );
+			common = readInt( array, pos + count( length ) );
+		}
+
+		return length + common;
+	}
+
+
+	/** Computes the length of the array at the given index.
+	 *
+	 * @param index an index.
+	 * @return the length of the <code>index</code>-th array.
+	 */
+	public int arrayLength( final int index ) {
+		ensureRestrictedIndex( index );
+		return length( index );
+	}
+
+	/** Extracts the array at the given index.
+	 *
+	 * @param index an index.
+	 * @param a the array that will store the result (we assume that it can hold the result).
+	 * @param offset an offset into <code>a</code> where elements will be stored.
+	 * @param length a maximum number of elements to store in <code>a</code>.
+	 * @return the length of the extracted array.
+	 */
+	private int extract( final int index, final KEY_TYPE a[], final int offset, final int length ) {
+		final int delta = index % ratio; // The delta inside the block.
+		final long startPos = p[ index / ratio ]; // The position into the array of the first entire word before the index-th.
+		long pos, prevArrayPos;
+		int arrayLength = readInt( array, pos = startPos ), currLen = 0, actualCommon;
+
+		if ( delta == 0 ) {
+			pos = p[ index / ratio ] + count( arrayLength );
+			BIG_ARRAYS.copyFromBig( array, pos, a, offset, Math.min( length, arrayLength ) );
+			return arrayLength;
+		}
+		
+		int common = 0;
+
+		for( int i = 0; i < delta; i++ ) {
+			prevArrayPos = pos + count( arrayLength ) + ( i != 0 ? count( common ) : 0 );
+			pos = prevArrayPos + arrayLength;
+
+			arrayLength = readInt( array, pos );
+			common = readInt( array, pos + count( arrayLength ) );
+
+			actualCommon = Math.min( common, length );
+			if ( actualCommon <= currLen ) currLen = actualCommon;
+			else {
+				BIG_ARRAYS.copyFromBig( array, prevArrayPos, a, currLen + offset, actualCommon - currLen );
+				currLen = actualCommon;
+			}
+		}
+
+		if ( currLen < length ) BIG_ARRAYS.copyFromBig( array, pos + count( arrayLength ) + count( common ), a, currLen + offset, Math.min( arrayLength, length - currLen ) );
+
+		return arrayLength + common;
+	}
+
+	public KEY_TYPE[] get( final int index ) {
+		return getArray( index );
+	}
+
+	/** Returns the array stored at the given index.
+	 *
+	 * @param index an index.
+	 * @return the <code>index</code>-th array.
+	 * @see #get(int)
+	 */
+
+	public KEY_TYPE[] getArray( final int index ) {
+		ensureRestrictedIndex( index );
+		final int length = length( index );
+		final KEY_TYPE a[] = new KEY_TYPE[ length ];
+		extract( index, a, 0, length );
+		return a;
+	}
+
+	/** Stores in the given array elements from an array stored in this front-coded list.
+	 *
+	 * @param index an index.
+	 * @param a the array that will store the result.
+	 * @param offset an offset into <code>a</code> where elements will be stored.
+	 * @param length a maximum number of elements to store in <code>a</code>.
+	 * @return if <code>a</code> can hold the extracted elements, the number of extracted elements;
+	 * otherwise, the number of remaining elements with the sign changed.
+	 */
+	public int get( final int index, final KEY_TYPE[] a, final int offset, final int length ) {
+		ensureRestrictedIndex( index );
+		ARRAYS.ensureOffsetLength( a, offset, length );
+
+		final int arrayLength = extract( index, a, offset, length );
+		if ( length >= arrayLength ) return arrayLength;
+		return length - arrayLength;
+	}
+
+	/** Stores in the given array an array stored in this front-coded list.
+	 *
+	 * @param index an index.
+	 * @param a the array that will store the content of the result (we assume that it can hold the result).
+	 * @return if <code>a</code> can hold the extracted elements, the number of extracted elements;
+	 * otherwise, the number of remaining elements with the sign changed.
+	 */
+	public int get( final int index, final KEY_TYPE[] a ) {
+		return get( index, a, 0, a.length );
+	}
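+
+	/* A minimal usage sketch for the return convention above, assuming the generated int specialization
+	 * (IntArrayFrontCodedList), an already built list fcl and some valid index i:
+	 *
+	 *     int[] buf = new int[ 16 ];
+	 *     int r = fcl.get( i, buf );         // r >= 0: number of extracted elements
+	 *     if ( r < 0 ) {                     // buf was too small: the full length is buf.length - r
+	 *         buf = new int[ buf.length - r ];
+	 *         fcl.get( i, buf );
+	 *     }
+	 */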
+
+	public int size() {
+		return n;
+	}
+
+	public ObjectListIterator<KEY_TYPE[]> listIterator( final int start ) {
+		ensureIndex( start );
+
+		return new AbstractObjectListIterator<KEY_TYPE[]>() {
+				KEY_TYPE s[] = ARRAYS.EMPTY_ARRAY;
+				int i = 0;
+				long pos = 0;
+				boolean inSync; // Whether the current value in s is the array just before the next to be produced.
+
+				{
+					if ( start != 0 ) {
+						if ( start == n ) i = start; // If we start at the end, we do nothing.
+						else {
+							pos = p[ start / ratio ];
+							int j = start % ratio;
+							i = start - j;
+							while( j-- != 0 ) next();
+						}
+					}
+				}
+				
+				public boolean hasNext() {
+					return i < n;
+				}
+
+				public boolean hasPrevious() {
+					return i > 0;
+				}
+
+				public int previousIndex() {
+					return i - 1;
+				}
+
+				public int nextIndex() {
+					return i;
+				}
+				
+				public KEY_TYPE[] next() {
+					int length, common;
+
+					if ( ! hasNext() ) throw new NoSuchElementException();
+
+					if ( i % ratio == 0 ) {
+						pos = p[ i / ratio ];
+						length = readInt( array, pos );
+						s = ARRAYS.ensureCapacity( s, length, 0 );
+						BIG_ARRAYS.copyFromBig( array, pos + count( length ), s, 0, length );
+						pos += length + count( length );
+						inSync = true;
+					}
+					else {
+						if ( inSync ) {
+							length = readInt( array, pos );
+							common = readInt( array, pos + count( length ) );
+							s = ARRAYS.ensureCapacity( s, length + common, common );
+							BIG_ARRAYS.copyFromBig( array, pos + count( length ) + count ( common ), s, common, length );
+							pos += count( length ) + count( common ) + length;
+							length += common;
+						}
+						else {
+							s = ARRAYS.ensureCapacity( s, length = length( i ), 0 );
+							extract( i, s, 0, length );
+						}
+					}
+					i++;
+					return ARRAYS.copy( s, 0, length );
+				}
+
+				public KEY_TYPE[] previous() {
+					if ( ! hasPrevious() ) throw new NoSuchElementException();
+					inSync = false;
+					return getArray( --i );
+				}
+			};
+	}
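+
+	/* A note on the iterator above (a sketch of its behaviour, following the code): within a block, as
+	 * long as the iteration proceeds forward, s caches the last returned array, so the next one is
+	 * obtained by copying just the non-shared suffix from the big array; after a call to previous() the
+	 * cache is invalidated (inSync = false) and the following next(), unless it falls on a block
+	 * boundary, recomputes the array from scratch via length() and extract().
+	 */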
+
+
+	/** Returns a copy of this list.
+	 *
+	 * <P>Note that, since this list is immutable, the returned copy is the list itself.
+	 *
+	 * @return a copy of this list.
+	 */
+
+	public ARRAY_FRONT_CODED_LIST clone() {
+		return this;
+	}
+
+
+	public String toString() {
+		final StringBuffer s = new StringBuffer();
+		s.append( "[ " );
+		for( int i = 0; i < n; i++ ) {
+			if ( i != 0 ) s.append( ", " );
+			s.append( ARRAY_LIST.wrap( getArray( i ) ).toString() );
+		}
+		s.append( " ]" );
+		return s.toString();
+	}
+
+	/** Computes the pointer array using the currently set ratio, number of elements and underlying array.
+	 *
+	 * @return the computed pointer array.
+	 */
+
+	protected long[] rebuildPointerArray() {
+		final long[] p = new long[ ( n + ratio - 1 ) / ratio ];
+		final KEY_TYPE a[][] = array;
+		int length, count;
+		long pos = 0;
+
+		for( int i = 0, j = 0, skip = ratio - 1; i < n; i++ ) {
+			length = readInt( a, pos );
+			count = count( length );
+			if ( ++skip == ratio ) {
+				skip = 0;
+				p[ j++ ] = pos;
+				pos += count + length;
+			}
+			else pos += count + count( readInt( a, pos + count ) ) + length;
+		}
+
+		return p;
+	}
+
+	
+	private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
+		s.defaultReadObject();
+
+		// Rebuild pointer array
+		p = rebuildPointerArray();
+	}
+
+
+#ifdef TEST
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif #keyclass(Object)
+		return Integer.toBinaryString( r.nextInt() );
+#else
+		return new java.io.Serializable() {};
+#endif
+	}
+
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition fp = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, fp ).toString();
+	}
+
+	private static void speedTest( int n, boolean comp ) {
+		System.out.println( "There are presently no speed tests for this class." );
+	}
+
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	private static boolean contentEquals( java.util.List x, java.util.List y ) {
+		if ( x.size() != y.size() ) return false;
+		for( int i = 0; i < x.size(); i++ ) if ( ! java.util.Arrays.equals( (KEY_TYPE[])x.get( i ), (KEY_TYPE[])y.get( i ) ) ) return false;
+		return true;
+	}
+
+	private static int l[];
+	private static KEY_TYPE[][] a; 
+
+
+	private static void test( int n ) {
+		int c;
+
+		l = new int[ n ];
+		a = new KEY_TYPE[n][];
+
+		for( int i = 0; i < n; i++ ) l[i] = (int)(Math.abs(r.nextGaussian())*32);
+		for( int i = 0; i < n; i++ ) a[i] = new KEY_TYPE[l[i]];
+		for( int i = 0; i < n; i++ ) for( int j = 0; j < l[i]; j++ ) a[i][j] = genKey();
+
+		ARRAY_FRONT_CODED_LIST m = new ARRAY_FRONT_CODED_LIST( it.unimi.dsi.fastutil.objects.ObjectIterators.wrap( a ), r.nextInt( 4 ) + 1 );
+		it.unimi.dsi.fastutil.objects.ObjectArrayList t = new it.unimi.dsi.fastutil.objects.ObjectArrayList( a );
+
+		//System.out.println(m);
+		//for( i = 0; i < t.size(); i++ ) System.out.println(ARRAY_LIST.wrap((KEY_TYPE[])t.get(i)));
+
+		/* Now we check that m actually holds that data. */
+		  
+		ensure( contentEquals( m, t ), "Error (" + seed + "): m does not equal t at creation" );
+
+		/* Now we check cloning. */
+
+		ensure( contentEquals( m, (java.util.List)m.clone() ), "Error (" + seed + "): m does not equal m.clone()" );
+
+		/* Now we play with iterators. */
+
+		{
+			ObjectListIterator i;
+			java.util.ListIterator j;
+			Object J;
+			i = m.listIterator(); 
+			j = t.listIterator(); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + seed + "): divergence in hasNext()" );
+				ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + seed + "): divergence in hasPrevious()" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					ensure( java.util.Arrays.equals( (KEY_TYPE[])i.next(), (KEY_TYPE[])j.next() ), "Error (" + seed + "): divergence in next()" );
+
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					ensure( java.util.Arrays.equals( (KEY_TYPE[])i.previous(), (KEY_TYPE[])j.previous() ), "Error (" + seed + "): divergence in previous()" );
+				}
+
+				ensure( i.nextIndex() == j.nextIndex(), "Error (" + seed + "): divergence in nextIndex()" );
+				ensure( i.previousIndex() == j.previousIndex(), "Error (" + seed + "): divergence in previousIndex()" );
+
+			}
+
+		}
+
+		{
+			Object previous = null;
+			Object I, J;
+			int from = r.nextInt( m.size() +1 );
+			ObjectListIterator i;
+			java.util.ListIterator j;
+			i = m.listIterator( from ); 
+			j = t.listIterator( from ); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" );
+				ensure( i.hasPrevious() == j.hasPrevious() , "Error (" + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					ensure( java.util.Arrays.equals( (KEY_TYPE[])i.next(), (KEY_TYPE[])j.next() ), "Error (" + seed + "): divergence in next() (iterator with starting point " + from + ")" );
+					//System.err.println("Done next " + I + " " + J + "  " + badPrevious);
+
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					ensure( java.util.Arrays.equals( (KEY_TYPE[])i.previous(), (KEY_TYPE[])j.previous() ), "Error (" + seed + "): divergence in previous() (iterator with starting point " + from + ")" );
+
+				}
+			}
+
+		}
+		
+
+
+		try {
+			java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test");
+			java.io.OutputStream os = new java.io.FileOutputStream(ff);
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os);
+				
+			oos.writeObject(m);
+			oos.close();
+				
+			java.io.InputStream is = new java.io.FileInputStream(ff);
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is);
+				
+			m = (ARRAY_FRONT_CODED_LIST)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch(Exception e) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+
+		ensure( contentEquals( m, t ), "Error (" + seed + "): m does not equal t after save/read" );
+
+		System.out.println("Test OK");
+		return;
+	}
+
+
+	public static void main( String args[] ) {
+		int n  = Integer.parseInt(args[1]);
+		if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) );
+		  
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+
+#endif
+
+
+}
diff --git a/drv/ArrayIndirectDoublePriorityQueue.drv b/drv/ArrayIndirectDoublePriorityQueue.drv
new file mode 100644
index 0000000..88e5911
--- /dev/null
+++ b/drv/ArrayIndirectDoublePriorityQueue.drv
@@ -0,0 +1,635 @@
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+#if #keyclass(Object)
+import java.util.Comparator;
+import it.unimi.dsi.fastutil.IndirectDoublePriorityQueue;
+#endif
+
+/** A type-specific array-based indirect double priority queue.
+ *
+ * <P>Instances of this class are based on a single array. This implementation
+ * is extremely inefficient, but it is difficult to beat when the size of the
+ * queue is very small.  The array is enlarged as needed, but it is never
+ * shrunk. Use the {@link #trim()} method to reduce its size, if necessary.
+ *
+ * <P>Either comparator may be <code>null</code>, indicating that natural comparison should take place. Of course,
+ * it makes little sense to have them equal.
+ */
+
+public class ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUE KEY_GENERIC extends ARRAY_INDIRECT_PRIORITY_QUEUE KEY_GENERIC implements INDIRECT_DOUBLE_PRIORITY_QUEUE KEY_GENERIC {
+
+	/** The secondary comparator. */
+	protected KEY_COMPARATOR KEY_SUPER_GENERIC secondaryComparator;
+
+	/** Creates a new empty queue with a given capacity.
+	 *
+	 * @param refArray the reference array.
+	 * @param capacity the initial capacity of this queue.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 * @param d the secondary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity, KEY_COMPARATOR KEY_SUPER_GENERIC c, KEY_COMPARATOR KEY_SUPER_GENERIC d ) {
+		super( refArray, capacity, c );
+		secondaryComparator = d;
+	}
+
+
+	/** Creates a new empty queue with a given capacity.
+	 *
+	 * <P>This constructor uses as secondary comparator the opposite order of <code>c</code>.
+	 *
+	 * @param refArray the reference array.
+	 * @param capacity the initial capacity of this queue.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	@SuppressWarnings("unchecked")
+	public ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity, KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		super( refArray, capacity, c == null ? COMPARATORS.OPPOSITE_COMPARATOR : COMPARATORS.oppositeComparator( c ) );
+	}
+
+
+	/** Creates a new empty queue with a given capacity and natural order as primary comparator.
+	 *
+	 * <P>This constructor uses as secondary comparator the opposite of the natural order.
+	 *
+	 * @param refArray the reference array.
+	 * @param capacity the initial capacity of this queue.
+	 */
+	public ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity ) {
+		this( refArray, capacity, null );
+	}
+
+
+	/** Creates a new empty queue with capacity equal to the length of the reference array.
+	 *
+	 * @param refArray the reference array.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 * @param d the secondary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, KEY_COMPARATOR KEY_SUPER_GENERIC c, KEY_COMPARATOR KEY_SUPER_GENERIC d ) {
+		this( refArray, refArray.length, c, d );
+	}
+
+	/** Creates a new empty queue with capacity equal to the length of the reference array.
+	 *
+	 * <P>This constructor uses as secondary comparator the opposite order of <code>c</code>.
+	 *
+	 * @param refArray the reference array.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( refArray, refArray.length, c );
+	}
+
+	/** Creates a new empty queue with capacity equal to the length of the reference array and natural order as primary comparator.
+	 *
+	 * <P>This constructor uses as secondary comparator the opposite of the natural order.
+	 *
+	 * @param refArray the reference array.
+	 */
+	public ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray ) {
+		this( refArray, refArray.length, null );
+	}
+
+
+	/** Wraps a given array in a queue using the given comparators.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param size the number of elements to be included in the queue.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 * @param d the secondary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size, final KEY_COMPARATOR KEY_SUPER_GENERIC c, final KEY_COMPARATOR KEY_SUPER_GENERIC d ) {
+		this( refArray, 0, c, d );
+		this.array = a;
+		this.size = size;
+	}
+
+	/** Wraps a given array in a queue using the given comparators.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 * @param d the secondary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, final KEY_COMPARATOR KEY_SUPER_GENERIC c, final KEY_COMPARATOR KEY_SUPER_GENERIC d ) {
+		this( refArray, a, a.length, c, d );
+	}
+
+
+	/** Wraps a given array in a queue using a given comparator and its opposite.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param size the number of elements to be included in the queue.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( refArray, 0, c );
+		this.array = a;
+		this.size = size;
+	}
+
+
+	/** Wraps a given array in a queue using a given comparator and its opposite.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( refArray, a, a.length, c );
+	}
+
+	/** Wraps a given array in a queue using the natural order and its opposite.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param size the number of elements to be included in the queue.
+	 */
+	public ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size ) {
+		this( refArray, a, size, null );
+	}
+
+
+	/** Wraps a given array in a queue using the natural order and its opposite.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 */
+	public ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a ) {
+		this( refArray, a, a.length );
+	}
+
+	/** Returns the index (in {@link #array}) of the smallest element w.r.t. the {@linkplain #secondaryComparator secondary comparator}. */
+
+	@SuppressWarnings("unchecked")
+	private int findSecondaryFirst() {
+		int i = size;
+		int firstIndex = --i;
+		KEY_GENERIC_TYPE first = refArray[ array[ firstIndex ] ];
+
+		if ( secondaryComparator == null ) while( i-- != 0 ) { if ( KEY_LESS( refArray[ array[ i ] ], first ) ) first = refArray[ array[ firstIndex = i ] ]; }
+		else while( i-- != 0 ) { if ( secondaryComparator.compare( refArray[ array[ i ] ], first ) < 0 ) first = refArray[ array[ firstIndex = i ] ]; }
+
+		return firstIndex;
+	}
+
+	@SuppressWarnings("unchecked")
+	private int findSecondaryLast() {
+		int i = size;
+		int lastIndex = --i;
+		KEY_GENERIC_TYPE last = refArray[ array[ lastIndex ] ];
+
+		if ( secondaryComparator == null ) while( i-- != 0 ) { if ( KEY_LESS( last, refArray[ array[ i ] ] ) ) last = refArray[ array[ lastIndex = i ] ]; }
+		else while( i-- != 0 ) { if ( secondaryComparator.compare( last, refArray[ array[ i ] ] ) < 0 ) last = refArray[ array[ lastIndex = i ] ]; }
+
+		return lastIndex;
+	}
+
+
+	public int secondaryFirst() {
+		return array[ findSecondaryFirst() ];
+	}
+
+
+	public int secondaryLast() {
+		return array[ findSecondaryLast() ];
+	}
+
+	public int secondaryFront( int[] a ) {
+		final KEY_GENERIC_TYPE secondaryTop = refArray[ array[ findSecondaryFirst() ] ];
+		int i = size, c = 0;
+		while( i-- != 0 ) if ( KEY_EQUALS_NOT_NULL( secondaryTop, refArray[ array[ i ] ] ) ) a[ c++ ] = array[ i ];
+		return c;
+	}
+
+	public void changed( int i ) {}
+
+	/** Returns the secondary comparator of this queue.
+	 *
+	 * @return the secondary comparator of this queue.
+	 * @see #secondaryFirst()
+	 */
+	public KEY_COMPARATOR KEY_SUPER_GENERIC secondaryComparator() { return secondaryComparator; }
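+
+	/* A minimal usage sketch, assuming the generated int specialization (IntArrayIndirectDoublePriorityQueue,
+	 * with COMPARATORS expanding to IntComparators), a natural-order primary comparator and an
+	 * opposite-order secondary comparator:
+	 *
+	 *     int[] ref = { 4, 1, 3 };
+	 *     IntArrayIndirectDoublePriorityQueue q =
+	 *         new IntArrayIndirectDoublePriorityQueue( ref, null, IntComparators.OPPOSITE_COMPARATOR );
+	 *     q.enqueue( 0 ); q.enqueue( 1 ); q.enqueue( 2 );
+	 *     q.first();          // 1: the index of the smallest value under the primary (natural) order
+	 *     q.secondaryFirst(); // 0: the index of the smallest value under the secondary order, i.e., the largest
+	 */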
+
+#ifdef TEST
+
+	/** The original class, now just used for testing. */
+
+	private static class TestQueue {
+
+		/** The reference array */
+		private KEY_TYPE refArray[];
+		/** Its length */
+		private int N;
+		/** The number of elements in the heaps */
+		private int n;
+		/** The two comparators */
+		private KEY_COMPARATOR primaryComp, secondaryComp;
+		/** Two indirect heaps are used, called <code>primary</code> and <code>secondary</code>. Each of them contains
+			a permutation of <code>n</code> of the indices 0, 1, ..., <code>N</code>-1, arranged so that the corresponding
+			objects are ordered with respect to the respective comparator.
+			We also need an array <code>inSec[]</code> such that <code>inSec[k]</code> is the position of <code>k</code>
+			inside <code>secondary</code>.
+		*/
+		private int primary[], secondary[], inSec[];
+
+		/** Builds a double indirect priority queue.
+		 *  @param refArray The reference array.
+		 *  @param primaryComp The primary comparator.
+		 *  @param secondaryComp The secondary comparator.
+		 */
+		public TestQueue( KEY_TYPE refArray[], KEY_COMPARATOR primaryComp, KEY_COMPARATOR secondaryComp ) {
+			this.refArray = refArray;
+			this.N = refArray.length;
+			assert this.N != 0;
+			this.n = 0;
+			this.primaryComp = primaryComp;
+			this.secondaryComp = secondaryComp;
+			this.primary = new int[N];
+			this.secondary = new int[N];
+			this.inSec = new int[N];
+			java.util.Arrays.fill( inSec, -1 );
+		}
+
+		/** Adds an index to the queue. Notice that the index should not be already present in the queue.
+		 *  @param i The index to be added
+		 */
+		public void add( int i ) {
+			if ( i < 0 || i >= refArray.length ) throw new IndexOutOfBoundsException();
+			if ( inSec[ i ] >= 0 ) throw new IllegalArgumentException();
+			primary[n] = i;
+			secondary[n] = i; inSec[i] = n;
+			n++;
+			swimPrimary( n-1 );
+			swimSecondary( n-1 );
+		}
+
+		/** Restores the heap condition of the primary heap by sifting down from the given position.
+		 *  @param i The position in the heap from which to sift down.
+		 */
+		private void heapifyPrimary( int i ) {
+			int dep = primary[i];
+			int child;
+
+			while ( ( child = 2*i+1 ) < n ) {
+				if ( child+1 < n && primaryComp.compare( refArray[primary[child+1]], refArray[primary[child]] ) < 0 ) child++;
+				if ( primaryComp.compare( refArray[dep], refArray[primary[child]] ) <= 0 ) break;
+				primary[i] = primary[child];
+				i = child;
+			}
+			primary[i] = dep;
+		}
+
+		/** Restores the heap condition of the secondary heap by sifting down from the given position.
+		 *  @param i The position in the heap from which to sift down.
+		 */
+		private void heapifySecondary( int i ) {
+			int dep = secondary[i];
+			int child;
+
+			while ( ( child = 2*i+1 ) < n ) {
+				if ( child+1 < n && secondaryComp.compare( refArray[secondary[child+1]], refArray[secondary[child]] ) < 0 ) child++;
+				if ( secondaryComp.compare( refArray[dep], refArray[secondary[child]] ) <= 0 ) break;
+				secondary[i] = secondary[child]; inSec[secondary[i]] = i;
+				i = child;
+			}
+			secondary[i] = dep; inSec[secondary[i]] = i;
+		}
+
+		/** Swim and heapify the primary heap.
+		 *  @param i The index to be moved.
+		 */
+		private void swimPrimary( int i ) {
+			int dep = primary[i];
+			int parent;
+
+			while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) {
+				if ( primaryComp.compare( refArray[primary[parent]], refArray[dep] ) <= 0 ) break;
+				primary[i] = primary[parent];
+				i = parent;
+			}
+			primary[i] = dep;
+			heapifyPrimary( i );
+		}
+
+		/** Swim and heapify the secondary heap.
+		 *  @param i The index to be moved.
+		 */
+		private void swimSecondary( int i ) {
+			int dep = secondary[i];
+			int parent;
+
+			while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) {
+				if ( secondaryComp.compare( refArray[secondary[parent]], refArray[dep] ) <= 0 ) break;
+				secondary[i] = secondary[parent]; inSec[secondary[i]] = i;
+				i = parent;
+			}
+			secondary[i] = dep; inSec[secondary[i]] = i;
+			heapifySecondary( i );
+		}
+
+		/** Returns the minimum element with respect to the primary comparator.
+			@return the minimum element.
+		*/
+		public int top() {
+			if ( n == 0 ) throw new java.util.NoSuchElementException();
+			return primary[0];
+		}
+
+		/** Returns the minimum element with respect to the secondary comparator.
+			@return the minimum element.
+		*/
+		public int secTop() {
+			if ( n == 0 ) throw new java.util.NoSuchElementException();
+			return secondary[0];
+		}
+
+		/** Removes the minimum element with respect to the primary comparator.
+		 */
+		public void remove() {
+			if ( n == 0 ) throw new java.util.NoSuchElementException();
+			int result = primary[0];
+			int ins = inSec[result];
+			inSec[ result ] = -1;
+			// Copy a leaf 
+			primary[0] = primary[n-1];
+			if ( ins == n-1 ) {
+				n--;
+				heapifyPrimary( 0 );	
+				return;
+			}
+			secondary[ins] = secondary[n-1]; 
+			inSec[secondary[ins]] = ins;
+			// Heapify
+			n--;
+			heapifyPrimary( 0 );
+			swimSecondary( ins );
+		}
+
+		public void clear() {
+			while( size() != 0 ) remove();
+		}
+
+		public void remove( int index ) {
+			if ( n == 0 ) throw new java.util.NoSuchElementException();
+			int result = primary[index];
+			int ins = inSec[result];
+			inSec[ result ] = -1;
+			// Copy a leaf 
+			primary[index] = primary[n-1];
+			if ( ins == n-1 ) {
+				n--;
+				swimPrimary( index );	
+				return;
+			}
+			secondary[ins] = secondary[n-1]; 
+			inSec[secondary[ins]] = ins;
+			// Heapify
+			n--;
+			swimPrimary( index );
+			swimSecondary( ins );
+		}
+
+		/** Signals that the minimum element with respect to the comparator has changed.
+		 */
+		public void change() {
+			int ins = inSec[primary[0]];
+			heapifyPrimary( 0 );
+			swimSecondary( ins );
+		}
+
+		public void change(int index) {
+			int ins = inSec[primary[index]];
+			swimPrimary( index );
+			swimSecondary( ins );
+		}
+
+		/** Returns the number of elements in the queue.
+		 *  @return the size of the queue
+		 */
+		public int size() {
+			return n;
+		}
+	}
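+
+	/* Invariant maintained by the test queue above (a sketch following the code): for every index k
+	 * currently in the queue, secondary[ inSec[ k ] ] == k, while inSec[ k ] == -1 for indices not in
+	 * the queue; heapifySecondary() and swimSecondary() restore it whenever they move an entry of
+	 * secondary.
+	 */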
+
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif #keyclass(Object)
+		return Integer.toBinaryString( r.nextInt() );
+#else 
+		return new java.io.Serializable() {};
+#endif
+	}
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition p = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, p ).toString();
+	}
+
+	private static void speedTest( int n, boolean comp ) {
+		System.out.println( "There are presently no speed tests for this class." );
+	}
+
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	private static boolean heapEqual( int[] a, int[] b, int sizea, int sizeb ) {
+		if ( sizea != sizeb ) return false;
+		while( sizea-- != 0 ) if ( a[sizea] != b[sizea] ) return false;
+		return true;
+	}
+
+	private static boolean invEqual( int inva[], int[] invb ) {
+		int i = inva.length;
+		while( i-- != 0 ) if ( inva[ i ] != invb[ i ] ) return false;
+		return true;
+	}
+
+
+
+	protected static void test( int n ) {
+		long ms;
+		Exception mThrowsIllegal, tThrowsIllegal, mThrowsOutOfBounds, tThrowsOutOfBounds, mThrowsNoElement, tThrowsNoElement;
+		int rm = 0, rt = 0;
+		KEY_TYPE[] refArray = new KEY_TYPE[ n ];
+
+		for( int i = 0; i < n; i++ ) refArray[ i ] = genKey();
+		  
+		HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE m = new HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE( refArray );
+		TestQueue t = new TestQueue( refArray, COMPARATORS.NATURAL_COMPARATOR, COMPARATORS.OPPOSITE_COMPARATOR );
+
+		/* We add pairs to t. */
+		for( int i = 0; i < n / 2;  i++ ) {
+			t.add( i );
+			m.enqueue( i );
+		}
+		
+		ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ in primary heap after creation (" + m + ", " + t + ")" );
+		ensure( heapEqual( m.secondaryQueue.heap, t.secondary, m.size(), t.size() ), "Error (" + seed + "): m and t differ in secondary heap after creation (" + m + ", " + t + ")" );
+		ensure( invEqual( m.secondaryQueue.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after creation (" + java.util.Arrays.toString( m.secondaryQueue.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" );
+
+		/* Now we add and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<2*n;  i++ ) {
+			if ( r.nextDouble() < 0.01 ) {
+				t.clear();
+				m.clear();
+				for( int j = 0; j < n / 2;  j++ ) {
+					t.add( j );
+					m.enqueue( j );
+				}
+			}
+
+			int T = r.nextInt( 2 * n );
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			try {
+				m.enqueue( T );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = e; }
+
+			try {
+				t.add( T );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): enqueue() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): enqueue() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+
+			ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ in primary heap after enqueue (" + m + ", " + t + ")" );
+			ensure( heapEqual( m.secondaryQueue.heap, t.secondary, m.size(), t.size() ), "Error (" + seed + "): m and t differ in secondary heap after enqueue (" + m + ", " + t + ")" );
+			ensure( invEqual( m.secondaryQueue.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after enqueue (" + java.util.Arrays.toString( m.secondaryQueue.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" );
+			
+			if ( m.size() != 0 ) {
+				ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after enqueue (" + m.first() + ", " + t.top() + ")");
+				ensure( m.secondaryFirst() == t.secTop(), "Error (" + seed + "): m and t differ in secondary first element after enqueue (" + m.secondaryFirst() + ", " + t.secTop() + ")");
+			}
+
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			try {
+				rm = m.dequeue();
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = e; }
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = e; }
+
+			try {
+				rt = t.top();
+				t.remove();
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = e; }
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): dequeue() divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): dequeue() divergence in IllegalArgumentException  (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ), "Error (" + seed + "): dequeue() divergence in java.util.NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( rt == rm , "Error (" + seed + "): divergence in dequeue() between t and m (" + rt + ", " + rm + ")" );
+
+			ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ in primary heap after dequeue (" + m + ", " + t + ")" );
+			ensure( heapEqual( m.secondaryQueue.heap, t.secondary, m.size(), t.size() ), "Error (" + seed + "): m and t differ in secondary heap after dequeue (" + m + ", " + t + ")" );
+			ensure( invEqual( m.secondaryQueue.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after dequeue (" + java.util.Arrays.toString( m.secondaryQueue.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" );
+
+			if ( m.size() != 0 ) {
+				ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after dequeue (" + m.first() + ", " + t.top() + ")");
+				ensure( m.secondaryFirst() == t.secTop(), "Error (" + seed + "): m and t differ in secondary first element after dequeue (" + m.secondaryFirst() + ", " + t.secTop() + ")");
+			}
+
+			if ( m.size() != 0 ) {
+
+				refArray[ m.first() ] = genKey();
+				
+				m.changed();
+				t.change();
+				
+				ensure( m.size() == t.size(), "Error (" + seed + "): m and t differ in size after change (" + m.size() + ", " + t.size() + ")");
+				
+				ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after change (" + m.first() + ", " + t.top() + ")");
+				ensure( m.secondaryFirst() == t.secTop(), "Error (" + seed + "): m and t differ in secondary first element after change (" + m.secondaryFirst() + ", " + t.secTop() + ")");
+			}
+		}
+
+
+		/* Now we check that m actually holds the same data. */
+		  
+		m.clear();
+		ensure( m.isEmpty(), "Error (" + seed + "): m is not empty after clear()" );
+
+		System.out.println("Test OK");
+	}
+
+
+
+	public static void main( String args[] ) {
+		int n  = Integer.parseInt(args[1]);
+		if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) );
+		  
+
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+
+#endif
+
+}
diff --git a/drv/ArrayIndirectPriorityQueue.drv b/drv/ArrayIndirectPriorityQueue.drv
new file mode 100644
index 0000000..78eb704
--- /dev/null
+++ b/drv/ArrayIndirectPriorityQueue.drv
@@ -0,0 +1,650 @@
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+#if #keyclass(Object)
+import java.util.Comparator;
+import it.unimi.dsi.fastutil.IndirectPriorityQueue;
+#endif
+
+import it.unimi.dsi.fastutil.ints.IntArrays;
+import it.unimi.dsi.fastutil.AbstractIndirectPriorityQueue;
+
+import java.util.NoSuchElementException;
+
+/** A type-specific array-based semi-indirect priority queue. 
+ *
+ * <P>Instances of this class use as reference list a <em>reference array</em>,
+ * which must be provided to each constructor, and represent a priority queue
+ * using a backing array of integer indices—all operations are performed
+ * directly on the array. The array is enlarged as needed, but it is never
+ * shrunk. Use the {@link #trim()} method to reduce its size, if necessary.
+ *
+ * <P>This implementation is extremely inefficient, but it is difficult to beat
+ * when the size of the queue is very small. Moreover, it allows enqueueing the same
+ * index several times, without limitations.
+ */
+
+public class ARRAY_INDIRECT_PRIORITY_QUEUE KEY_GENERIC extends AbstractIndirectPriorityQueue<KEY_GENERIC_CLASS> implements INDIRECT_PRIORITY_QUEUE KEY_GENERIC {
+
+	/** The reference array. */
+	protected KEY_GENERIC_TYPE refArray[];
+
+	/** The backing array. */
+	protected int array[] = IntArrays.EMPTY_ARRAY;
+
+	/** The number of elements in this queue. */
+	protected int size;
+ 	
+	/** The type-specific comparator used in this queue. */
+	protected KEY_COMPARATOR KEY_SUPER_GENERIC c;
+
+	/** The first index, cached, if {@link #firstIndexValid} is true. */
+	protected int firstIndex;
+
+	/** Whether {@link #firstIndex} contains a valid value. */
+	protected boolean firstIndexValid;
+	
+
+	/** Creates a new empty queue with a given capacity and comparator.
+	 *
+	 * @param refArray the reference array.
+	 * @param capacity the initial capacity of this queue.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public ARRAY_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity, KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		if ( capacity > 0 ) this.array = new int[ capacity ];
+		this.refArray = refArray;
+		this.c = c;
+	}
+
+	/** Creates a new empty queue with given capacity and using the natural order.
+	 *
+	 * @param refArray the reference array.
+	 * @param capacity the initial capacity of this queue.
+	 */
+	public ARRAY_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity ) {
+		this( refArray, capacity, null );
+	}
+
+	/** Creates a new empty queue with capacity equal to the length of the reference array and a given comparator.
+	 *
+	 * @param refArray the reference array.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public ARRAY_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( refArray, refArray.length, c );
+	}
+
+	/** Creates a new empty queue with capacity equal to the length of the reference array and using the natural order. 
+	 * @param refArray the reference array.
+	 */
+	public ARRAY_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray ) {
+		this( refArray, refArray.length, null );
+	}
+
+
+	/** Wraps a given array in a queue using a given comparator.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param size the number of elements to be included in the queue.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public ARRAY_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( refArray, 0, c );
+		this.array = a;
+		this.size = size;
+	}
+
+
+	/** Wraps a given array in a queue using a given comparator.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public ARRAY_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( refArray, a, a.length, c );
+	}
+
+	/** Wraps a given array in a queue using the natural order.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param size the number of elements to be included in the queue.
+	 */
+	public ARRAY_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size ) {
+		this( refArray, a, size, null );
+	}
+
+
+	/** Wraps a given array in a queue using the natural order.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 */
+	public ARRAY_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a ) {
+		this( refArray, a, a.length );
+	}
+
+
+	/** Returns the index (in {@link #array}) of the smallest element. */
+
+	@SuppressWarnings("unchecked")
+	private int findFirst() {
+		if ( firstIndexValid ) return this.firstIndex;
+		firstIndexValid = true;
+		int i = size;
+		int firstIndex = --i;
+		KEY_GENERIC_TYPE first = refArray[ array[ firstIndex ] ];
+
+		if ( c == null ) while( i-- != 0 ) { if ( KEY_LESS( refArray[ array[ i ] ], first ) ) first = refArray[ array[ firstIndex = i ] ]; }
+		else while( i-- != 0 ) { if ( c.compare( refArray[ array[ i ] ], first ) < 0 ) first = refArray[ array[ firstIndex = i ] ]; }
+
+		return this.firstIndex = firstIndex;
+	}
+
+	/** Returns the index (in {@link #array}) of the largest element. */
+
+	@SuppressWarnings("unchecked")
+	private int findLast() {
+		int i = size;
+		int lastIndex = --i;
+		KEY_GENERIC_TYPE last = refArray[ array[ lastIndex ] ];
+
+		if ( c == null ) { while( i-- != 0 ) if ( KEY_LESS( last, refArray[ array[ i ] ] ) ) last = refArray[ array[ lastIndex = i ] ]; }
+		else { while( i-- != 0 ) if ( c.compare( last, refArray[ array[ i ] ] ) < 0 ) last = refArray[ array[ lastIndex = i ] ]; }
+
+		return lastIndex;
+	}
+
+	protected final void ensureNonEmpty() {
+		if ( size == 0 ) throw new NoSuchElementException();
+	}
+
+	/** Ensures that the given index is a valid reference into the reference array.
+	 *
+	 * @param index an index in the reference array.
+	 * @throws IndexOutOfBoundsException if the given index is negative or not smaller than the reference array length.
+	 */
+	protected void ensureElement( final int index ) {
+		if ( index < 0 )  throw new IndexOutOfBoundsException( "Index (" + index + ") is negative" );
+		if ( index >= refArray.length ) throw new IndexOutOfBoundsException( "Index (" + index + ") is larger than or equal to reference array size (" + refArray.length + ")" );
+	}
+	
+	/** Enqueues a new element.
+	 *
+	 * <P>Note that for efficiency reasons this method will <em>not</em> throw an exception
+	 * when <code>x</code> is already in the queue. However, the queue state will become
+	 * inconsistent and the following behaviour will not be predictable.
+	 */
+	@SuppressWarnings("unchecked")
+	public void enqueue( int x ) {
+		ensureElement( x );
+		
+		if ( size == array.length ) array = IntArrays.grow( array, size + 1 );
+		if ( firstIndexValid ) {
+			if ( c == null ) { if ( KEY_LESS( refArray[ x ], refArray[ array[ firstIndex ] ] ) ) firstIndex = size; }
+			else if ( c.compare( refArray[ x ], refArray[ array[ firstIndex ] ] ) < 0 ) firstIndex = size;
+		}
+		else firstIndexValid = false;
+		array[ size++ ] = x;
+	}
+
+	public int dequeue() {
+		ensureNonEmpty();
+		final int firstIndex = findFirst();
+		final int result = array[ firstIndex ];
+		if ( --size != 0 ) System.arraycopy( array, firstIndex + 1, array, firstIndex, size - firstIndex );
+		firstIndexValid = false;
+		return result;
+	}
+
+	public int first() {
+		ensureNonEmpty();
+		return array[ findFirst() ];
+	}
+
+	public int last() {
+		ensureNonEmpty();
+		return array[ findLast() ];
+	}
+
+	public void changed() {
+		ensureNonEmpty();
+		firstIndexValid = false;
+	}
+
+	/** {@inheritDoc}
+	 *
+	 * <P>Note that for efficiency reasons this method will <em>not</em> throw an exception
+	 * when <code>index</code> is not in the queue.
+	 */
+
+	public void changed( int index ) {
+		ensureElement( index );
+		if ( index == firstIndex ) firstIndexValid = false;
+	}
+
+	public void allChanged() {
+		firstIndexValid = false;
+	}
+
+	public boolean remove( int index ) {
+		ensureElement( index );
+		final int[] a = array;
+		int i = size;
+		while( i-- != 0 ) if ( a[ i ] == index ) break;
+		if ( i < 0 ) return false;
+		firstIndexValid = false;
+		if ( --size != 0 ) System.arraycopy( a, i + 1, a, i, size - i );
+		return true;
+	}
+
+	public int front( int[] a ) {
+		final KEY_GENERIC_TYPE top = refArray[ array[ findFirst() ] ];
+		int i = size, c = 0;
+		while( i-- != 0 ) if ( KEY_EQUALS_NOT_NULL( top, refArray[ array[ i ] ] ) ) a[ c++ ] = array[ i ];
+		return c;
+	}
+
+	public int size() { return size; }
+
+	public void clear() { size = 0; firstIndexValid = false; }
+
+	/** Trims the backing array so that it has exactly {@link #size()} elements.
+	 */
+
+	public void trim() {
+		array = IntArrays.trim( array, size );
+	}
+
+	public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return c; }
+
+	public String toString() {
+		StringBuffer s = new StringBuffer();
+		s.append( "[" );
+		for ( int i = 0; i < size; i++ ) {
+			if ( i != 0 ) s.append( ", " );
+			s.append( refArray[ array [ i ] ] );
+		}
+		s.append( "]" );
+		return s.toString();
+	}
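+
+	/* A minimal usage sketch, assuming the generated int specialization (IntArrayIndirectPriorityQueue)
+	 * and the natural order:
+	 *
+	 *     int[] ref = { 4, 1, 3 };                  // priorities live in the reference array
+	 *     IntArrayIndirectPriorityQueue q = new IntArrayIndirectPriorityQueue( ref );
+	 *     q.enqueue( 0 ); q.enqueue( 1 ); q.enqueue( 2 );
+	 *     int i = q.first();                        // 1: the index of the smallest priority
+	 *     ref[ i ] = 5;                             // priorities are changed through the reference array...
+	 *     q.changed();                              // ...and the queue is notified afterwards
+	 *     q.dequeue();                              // 2, since ref[ 2 ] == 3 is now the smallest
+	 */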
+
+
+#ifdef TEST
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif #keyclass(Object)
+		return Integer.toBinaryString( r.nextInt() );
+#else 
+		return new java.io.Serializable() {};
+#endif
+	}
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition p = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, p ).toString();
+	}
+
+	private static void speedTest( int n, boolean comp ) {
+		int i, j, s;
+		ARRAY_INDIRECT_PRIORITY_QUEUE[] m = new ARRAY_INDIRECT_PRIORITY_QUEUE[ 100000 ];
+		HEAP_INDIRECT_PRIORITY_QUEUE[] t = new HEAP_INDIRECT_PRIORITY_QUEUE[ m.length ];
+		KEY_TYPE k[] = new KEY_TYPE[n];
+		KEY_TYPE nk[] = new KEY_TYPE[m.length];
+		long ms;
+
+		for( i = 0; i < n; i++ ) k[i] = genKey();
+		for( i = 0; i < m.length; i++ ) nk[i] = genKey();
+		  
+		double totEnq = 0, totDeq = 0, totChange = 0, d;
+
+		for( i = 0; i < m.length; i++ ) {
+			t[ i ] = new HEAP_INDIRECT_PRIORITY_QUEUE( k );
+			m[ i ] = new ARRAY_INDIRECT_PRIORITY_QUEUE( k );
+		}
+
+		if ( comp ) {
+			for( j = 0; j < 20; j++ ) {
+
+				for( i = 0; i < m.length; i++ ) t[ i ].clear();
+
+				ms = System.currentTimeMillis();
+				s = m.length;
+				while( s-- != 0 ) { i = n; while( i-- != 0 ) t[ s ].enqueue( i ); }
+				d = System.currentTimeMillis() - ms;
+				if ( j > 2 ) totEnq += d; 				
+				System.out.print("Enqueue: " + format( m.length * n/d ) +" K/s " );
+
+				ms = System.currentTimeMillis();
+				s = m.length;
+				while( s-- != 0 ) { i = n; while( i-- != 0 ) { k[ t[ s ].first() ] = nk[ i ]; t[ s ].changed(); } }
+				d = System.currentTimeMillis() - ms;
+				if ( j > 2 ) totChange += d; 				
+				System.out.print("Change: " + format( m.length * n/d ) +" K/s " );
+
+				ms = System.currentTimeMillis();
+				s = m.length;
+				while( s-- != 0 ) { i = n; while( i-- != 0 ) t[ s ].dequeue(); }
+				d = System.currentTimeMillis() - ms;
+				if ( j > 2 ) totDeq += d; 				
+				System.out.print("Dequeue: " + format( m.length * n/d ) +" K/s " );
+
+				System.out.println();
+			}
+
+			System.out.println();
+			System.out.println( "Heap:  Enqueue: " + format( m.length * (j-3)*n/totEnq ) + " K/s Dequeue: " + format( m.length * (j-3)*n/totDeq )  + " K/s Change: " + format( m.length * (j-3)*n/totChange ) + " K/s" );
+
+			System.out.println();
+
+			totEnq = totChange = totDeq = 0;
+
+		}
+
+		for( j = 0; j < 20; j++ ) {
+
+			for( i = 0; i < m.length; i++ ) m[ i ].clear();
+
+			ms = System.currentTimeMillis();
+			s = m.length;
+			while( s-- != 0 ) { i = n; while( i-- != 0 ) m[ s ].enqueue( i ); }
+			d = System.currentTimeMillis() - ms;
+			if ( j > 2 ) totEnq += d; 				
+			System.out.print("Enqueue: " + format( m.length * n/d ) +" K/s " );
+
+			ms = System.currentTimeMillis();
+			s = m.length;
+			while( s-- != 0 ) { i = n; while( i-- != 0 ) { k[ m[ s ].first() ] = nk[ i ]; m[ s ].changed(); } }
+			d = System.currentTimeMillis() - ms;
+			if ( j > 2 ) totChange += d; 				
+			System.out.print("Change: " + format( m.length * n/d ) +" K/s " );
+
+			ms = System.currentTimeMillis();
+			s = m.length;
+			while( s-- != 0 ) { i = n; while( i-- != 0 ) m[ s ].dequeue(); }
+			d = System.currentTimeMillis() - ms;
+			if ( j > 2 ) totDeq += d; 				
+			System.out.print("Dequeue: " + format( m.length * n/d ) +" K/s " );
+			
+			System.out.println();
+		}
+
+
+		System.out.println();
+		System.out.println( "Array:  Enqueue: " + format( m.length * (j-3)*n/totEnq ) + " K/s Dequeue: " + format( m.length * (j-3)*n/totDeq )  + " K/s Change: " + format( m.length * (j-3)*n/totChange ) + " K/s" );
+
+		System.out.println();
+	}
+
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	private static boolean heapEqual( int[] a, int[] b, int sizea, int sizeb ) {
+		if ( sizea != sizeb ) return false;
+		KEY_TYPE[] aa = new KEY_TYPE[ sizea ];
+		KEY_TYPE[] bb = new KEY_TYPE[ sizea ];
+		for( int i = 0; i < sizea; i++ ) {
+			aa[ i ] = ref[ a[ i ] ];
+			bb[ i ] = ref[ b[ i ] ];
+		}
+		java.util.Arrays.sort( aa );
+		java.util.Arrays.sort( bb );
+		while( sizea-- != 0 ) if ( !KEY_EQUALS(aa[sizea], bb[sizea]) ) return false;
+		return true;
+	}
+
+	private static KEY_TYPE[] ref;
+
+	protected static void test( int n ) {
+		long ms;
+		Exception mThrowsIllegal, tThrowsIllegal, mThrowsOutOfBounds, tThrowsOutOfBounds, mThrowsNoElement, tThrowsNoElement;
+		int rm = 0, rt = 0;
+
+		ref = new KEY_TYPE[ n ];
+
+		for( int i = 0; i < n; i++ ) ref[ i ] = genKey();
+		  
+		ARRAY_INDIRECT_PRIORITY_QUEUE m = new ARRAY_INDIRECT_PRIORITY_QUEUE( ref );
+		HEAP_INDIRECT_PRIORITY_QUEUE t = new HEAP_INDIRECT_PRIORITY_QUEUE( ref );
+
+		/* We add pairs to t. */
+		for( int i = 0; i < n / 2;  i++ ) {
+			t.enqueue( i );
+			m.enqueue( i );
+		}
+
+		ensure( heapEqual( m.array, t.heap, m.size(), t.size() ), "Error (" + seed + "): m and t differ after creation (" + m + ", " + t + ")" );
+
+		/* Now we add and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<2*n;  i++ ) {
+			if ( r.nextDouble() < 0.01 ) {
+				t.clear();
+				m.clear();
+				for( int j = 0; j < n / 2;  j++ ) {
+					t.enqueue( j );
+					m.enqueue( j );
+				}
+			}
+
+			int T = r.nextInt( 2 * n );
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			try {
+				t.enqueue( T );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = e; }
+
+			if ( tThrowsIllegal == null ) { // To skip duplicates
+				try {
+					m.enqueue( T );
+				}
+				catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+				catch ( IllegalArgumentException e ) { mThrowsIllegal = e; }
+			}
+
+			mThrowsIllegal = tThrowsIllegal = null; // To skip duplicates
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): enqueue() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): enqueue() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+
+			ensure( heapEqual( m.array, t.heap, m.size(), t.size() ), "Error (" + seed + "): m and t differ after enqueue (" + m + ", " + t + ")" );
+			
+			if ( m.size() != 0 ) {
+				ensure( KEY_EQUALS( ref[ m.first() ], ref[ t.first() ] ), "Error (" + seed + "): m and t differ in first element after enqueue (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")");
+			}
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			try {
+				rm = m.dequeue();
+				while( ! m.isEmpty() && KEY_EQUALS( ref[ m.first() ], ref[ rm ] ) ) m.dequeue();
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = e; }
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = e; }
+
+			try {
+				rt = t.dequeue();
+				while( ! t.isEmpty() && KEY_EQUALS( ref[ t.first() ], ref[ rt ] ) ) t.dequeue();
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = e; }
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): dequeue() divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): dequeue() divergence in IllegalArgumentException  (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ), "Error (" + seed + "): dequeue() divergence in java.util.NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( KEY_EQUALS( ref[ rt ], ref[ rm ]), "Error (" + seed + "): divergence in dequeue() between m and t (" + rm + "->" + ref[ rm ] + ", " + rt + "->" + ref[ rt ] + ")" );
+
+
+			ensure( heapEqual( m.array, t.heap, m.size(), t.size() ), "Error (" + seed + "): m and t differ after dequeue (" + m + ", " + t + ")");
+			
+			if ( m.size() != 0 ) {
+				ensure( KEY_EQUALS( ref[ m.first() ], ref[ t.first() ] ), "Error (" + seed + "): m and t differ in first element after dequeue (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")");
+			}
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+
+			int pos = r.nextInt( n * 2 );
+
+			try {
+				m.remove( pos );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = e; }
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = e; }
+
+			try {
+				t.remove( pos );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = e; }
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): remove(int) divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): remove(int) divergence in IllegalArgumentException  (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ), "Error (" + seed + "): remove(int) divergence in java.util.NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+
+			ensure( heapEqual( m.array, t.heap, m.size(), t.size() ), "Error (" + seed + "): m and t differ after remove(int) (" + m + ", " + t + ")" );
+
+			if ( m.size() != 0 ) {
+				ensure( KEY_EQUALS( ref[ m.first() ], ref[ t.first() ] ), "Error (" + seed + "): m and t differ in first element after remove(int) (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")");
+			}
+
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			pos = r.nextInt( n );
+
+			try {
+				t.changed( pos );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = e; }
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = e; }
+
+			if ( tThrowsIllegal == null ) {
+				try {
+					m.changed( pos );
+				}
+				catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+				catch ( IllegalArgumentException e ) { mThrowsIllegal = e; }
+				catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = e; }
+			}
+			
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): change(int) divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			//ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): change(int) divergence in IllegalArgumentException  (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ), "Error (" + seed + "): change(int) divergence in java.util.NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+
+			ensure( heapEqual( m.array, t.heap, m.size(), t.size() ), "Error (" + seed + "): m and t differ after change(int) (" + m + ", " + t + ")" );
+
+			if ( m.size() != 0 ) {
+				ensure( KEY_EQUALS( ref[ m.first() ], ref[ t.first() ] ), "Error (" + seed + "): m and t differ in first element after change(int) (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")");
+			}
+
+			int[] temp = (int[])t.heap.clone();
+			java.util.Arrays.sort( temp, 0, t.size() ); // To scramble a bit
+			m = new ARRAY_INDIRECT_PRIORITY_QUEUE( m.refArray, temp, t.size() );
+
+			ensure( heapEqual( m.array, t.heap, m.size(), t.size() ), "Error (" + seed + "): m and t differ after wrap (" + m + ", " + t + ")" );
+
+			if ( m.size() != 0 ) {
+				ensure( KEY_EQUALS( ref[ m.first() ], ref[ t.first() ] ), "Error (" + seed + "): m and t differ in first element after wrap (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")");
+			}
+
+			if ( m.size() != 0 && ( ( new it.unimi.dsi.fastutil.ints.IntOpenHashSet( m.array, 0, m.size ) ).size() == m.size() ) ) {
+
+				int first = m.first();
+				ref[ first ] = genKey();
+
+				//System.err.println("Pre-change m: " +m );
+				//System.err.println("Pre-change t: " +t );
+				m.changed();
+				t.changed( first );
+				
+				//System.err.println("Post-change m: " +m );
+				//System.err.println("Post-change t: " +t );
+
+				ensure( heapEqual( m.array, t.heap, m.size(), t.size() ), "Error (" + seed + "): m and t differ after change (" + m + ", " + t + ")");
+				
+				if ( m.size() != 0 ) {
+					ensure( KEY_EQUALS( ref[ m.first() ], ref[ t.first() ] ), "Error (" + seed + "): m and t differ in first element after change (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")");
+				}
+			}
+		}
+
+
+		/* Finally, we clear m and check that it is empty. */
+		  
+		m.clear();
+		ensure( m.isEmpty(), "Error (" + seed + "): m is not empty after clear()" );
+
+		System.out.println("Test OK");
+	}
+
+
+
+	public static void main( String args[] ) {
+		int n  = Integer.parseInt(args[1]);
+		if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) );
+		  
+
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+
+#endif
+}
diff --git a/drv/ArrayList.drv b/drv/ArrayList.drv
new file mode 100644
index 0000000..fc2bfeb
--- /dev/null
+++ b/drv/ArrayList.drv
@@ -0,0 +1,1270 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.Arrays;
+
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.RandomAccess;
+import java.util.NoSuchElementException;
+
+#if #keys(primitive)
+
+/** A type-specific array-based list; provides some additional methods that use polymorphism to avoid (un)boxing. 
+ *
+ * <P>This class implements a lightweight, fast, open, optimized,
+ * reuse-oriented version of array-based lists. Instances of this class
+ * represent a list with an array that is enlarged as needed when new entries
+ * are created (by doubling its current length), but is
+ * <em>never</em> made smaller (even on a {@link #clear()}). A family of
+ * {@linkplain #trim() trimming methods} lets you control the size of the
+ * backing array; this is particularly useful if you reuse instances of this class.
+ * Range checks are equivalent to those of {@link java.util}'s classes, but
+ * they are delayed as much as possible. The backing array is exposed by the
+ * {@link #elements()} method.
+ *
+ * <p>This class implements the bulk methods <code>removeElements()</code>,
+ * <code>addElements()</code> and <code>getElements()</code> using
+ * high-performance system calls (e.g., {@link
+ * System#arraycopy(Object,int,Object,int,int) System.arraycopy()} instead of
+ * expensive loops.
+ *
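 * <p>A minimal usage sketch is shown below; the concrete class name (the
 * <code>int</code> specialization <code>IntArrayList</code>) and the capacity value are
 * chosen purely for illustration:
 * <pre>
 * IntArrayList l = new IntArrayList( 4 ); // initial capacity 4
 * l.add( 1 );
 * l.add( 2 );
 * int[] backing = l.elements(); // the backing array, of length &gt;= l.size()
 * l.clear();                    // size becomes 0, but the capacity is kept
 * l.trim();                     // now the backing array is trimmed to the size
 * </pre>
 *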
+ * @see java.util.ArrayList
+ */
+
+public class ARRAY_LIST KEY_GENERIC extends ABSTRACT_LIST KEY_GENERIC implements RandomAccess, Cloneable, java.io.Serializable {
+	private static final long serialVersionUID = -7046029254386353130L;
+
+
+#else
+
+/** A type-specific array-based list; provides some additional methods that use polymorphism to avoid (un)boxing. 
+ *
+ * <P>This class implements a lightweight, fast, open, optimized,
+ * reuse-oriented version of array-based lists. Instances of this class
+ * represent a list with an array that is enlarged as needed when new entries
+ * are created (by doubling the current length), but is
+ * <em>never</em> made smaller (even on a {@link #clear()}). A family of
+ * {@linkplain #trim() trimming methods} lets you control the size of the
+ * backing array; this is particularly useful if you reuse instances of this class.
+ * Range checks are equivalent to those of {@link java.util}'s classes, but
+ * they are delayed as much as possible. 
+ *
+ * <p>The backing array is exposed by the {@link #elements()} method. If an instance
+ * of this class was created {@linkplain #wrap(Object[],int) by wrapping}, 
+ * backing-array reallocations will be performed using reflection, so that
+ * {@link #elements()} can return an array of the same type of the original array; the comments
+ * about efficiency made in {@link it.unimi.dsi.fastutil.objects.ObjectArrays} apply here.
+ *
+ * <p>This class implements the bulk methods <code>removeElements()</code>,
+ * <code>addElements()</code> and <code>getElements()</code> using
+ * high-performance system calls (e.g., {@link
+ * System#arraycopy(Object,int,Object,int,int) System.arraycopy()}) instead of
+ * expensive loops.
+ *
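 * <p>A minimal sketch of the wrapping behaviour described above; the class name (the
 * object specialization <code>ObjectArrayList</code>) and the array contents are purely
 * illustrative:
 * <pre>
 * String[] a = { "foo", "bar" };
 * ObjectArrayList&lt;String&gt; l = ObjectArrayList.wrap( a );
 * l.add( "baz" );            // reallocates, but (via reflection) still as a String[]
 * String[] b = l.elements(); // safe: the returned array has the wrapped array's type
 * </pre>
 *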
+ * @see java.util.ArrayList
+ */
+
+public class ARRAY_LIST KEY_GENERIC extends ABSTRACT_LIST KEY_GENERIC implements RandomAccess, Cloneable, java.io.Serializable {
+	private static final long serialVersionUID = -7046029254386353131L;
+
+
+#endif
+
+	/** The initial default capacity of an array list. */
+	public final static int DEFAULT_INITIAL_CAPACITY = 16;
+
+#if ! #keys(primitive)
+	/** Whether the backing array was passed to <code>wrap()</code>. In
+	 * this case, we must reallocate with the same type of array. */
+	protected final boolean wrapped;
+#endif
+
+	/** The backing array. */
+	protected transient KEY_GENERIC_TYPE a[];
+
+	/** The current actual size of the list (never greater than the backing-array length). */
+	protected int size;
+
+	private static final boolean ASSERTS = ASSERTS_VALUE;
+
+	/** Creates a new array list using a given array.
+	 *
+	 * <P>This constructor is only meant to be used by the wrapping methods.
+	 *
+	 * @param a the array that will be used to back this array list.
+	 */
+
+	@SuppressWarnings("unused")
+	protected ARRAY_LIST( final KEY_GENERIC_TYPE a[], boolean dummy ) {
+		this.a = a;
+#if ! #keys(primitive)
+		this.wrapped = true;
+#endif
+	}
+
+	/** Creates a new array list with given capacity.
+	 *
+	 * @param capacity the initial capacity of the array list (may be 0).
+	 */
+
+	@SuppressWarnings("unchecked")
+	public ARRAY_LIST( final int capacity ) {
+		if ( capacity < 0 ) throw new IllegalArgumentException( "Initial capacity (" + capacity + ") is negative" );
+
+		a = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ capacity ];
+#if ! #keys(primitive)
+		wrapped = false;
+#endif
+	}
+
+	/** Creates a new array list with {@link #DEFAULT_INITIAL_CAPACITY} capacity.
+	 */
+	 
+	public ARRAY_LIST() {
+		this( DEFAULT_INITIAL_CAPACITY );
+	}
+
+	/** Creates a new array list and fills it with a given collection.
+	 *
+	 * @param c a collection that will be used to fill the array list.
+	 */
+	 
+	public ARRAY_LIST( final Collection<? extends KEY_GENERIC_CLASS> c ) {
+		this( c.size() );
+#if #keys(primitive)
+		size = ITERATORS.unwrap( ITERATORS.AS_KEY_ITERATOR( c.iterator() ), a );
+#else
+		size = ITERATORS.unwrap( c.iterator(), a );
+#endif
+	}
+
+	/** Creates a new array list and fills it with a given type-specific collection.
+	 *
+	 * @param c a type-specific collection that will be used to fill the array list.
+	 */
+	 
+	public ARRAY_LIST( final COLLECTION KEY_EXTENDS_GENERIC c ) {
+		this( c.size() );
+		size = ITERATORS.unwrap( c.iterator(), a );
+	}
+
+	/** Creates a new array list and fills it with a given type-specific list.
+	 *
+	 * @param l a type-specific list that will be used to fill the array list.
+	 */
+	 
+	public ARRAY_LIST( final LIST KEY_EXTENDS_GENERIC l ) {
+		this( l.size() );
+		l.getElements( 0, a, 0, size = l.size() );
+	}
+
+	/** Creates a new array list and fills it with the elements of a given array.
+	 *
+	 * @param a an array whose elements will be used to fill the array list.
+	 */
+	 
+	public ARRAY_LIST( final KEY_GENERIC_TYPE a[] ) {
+		this( a, 0, a.length );
+	}
+
+	/** Creates a new array list and fills it with the elements of a given array.
+	 *
+	 * @param a an array whose elements will be used to fill the array list.
+	 * @param offset the first element to use.
+	 * @param length the number of elements to use.
+	 */
+	 
+	public ARRAY_LIST( final KEY_GENERIC_TYPE a[], final int offset, final int length ) {
+		this( length );
+		System.arraycopy( a, offset, this.a, 0, length );
+		size = length;
+	}
+
+	/** Creates a new array list and fills it with the elements returned by an iterator.
+	 *
+	 * @param i an iterator whose returned elements will fill the array list.
+	 */
+	 
+	public ARRAY_LIST( final Iterator<? extends KEY_GENERIC_CLASS> i ) {
+		this();
+		while( i.hasNext() ) this.add( i.next() );
+	}
+
+	/** Creates a new array list and fills it with the elements returned by a type-specific iterator.
+	 *
+	 * @param i a type-specific iterator whose returned elements will fill the array list.
+	 */
+	 
+	public ARRAY_LIST( final KEY_ITERATOR KEY_EXTENDS_GENERIC i ) {
+		this();
+		while( i.hasNext() ) this.add( i.NEXT_KEY() );
+	}
+
+#if #keys(primitive)
+	/** Returns the backing array of this list.
+	 *
+	 * @return the backing array.
+	 */
+
+	public KEY_GENERIC_TYPE[] elements() {
+		return a;
+	}
+#else
+	/** Returns the backing array of this list.
+	 *
+	 * <P>If this array list was created by wrapping a given array, it is guaranteed
+	 * that the type of the returned array will be the same. Otherwise, the returned
+	 * array will be of type {@link Object Object[]} (in spite of the declared return type).
+	 * 
+	 * <strong>Warning</strong>: This behaviour may cause (unfathomable) 
+	 * run-time errors if a method expects an array
+	 * actually of type <code>K[]</code>, but this method returns an array
+	 * of type {@link Object Object[]}.
+	 *
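	 * <p>A minimal sketch of the pitfall described above (the class name
	 * <code>ObjectArrayList</code> is used purely for illustration):
	 * <pre>
	 * ObjectArrayList&lt;String&gt; l = new ObjectArrayList&lt;String&gt;();
	 * l.add( "foo" );
	 * String[] a = l.elements(); // throws ClassCastException: the backing array is an Object[]
	 * </pre>
	 *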
+	 * @return the backing array.
+	 */
+
+	public K[] elements() {
+		return a;
+	}
+#endif
+
+	/** Wraps a given array into an array list of given size.
+	 *
+	 * @param a an array to wrap.
+	 * @param length the length of the resulting array list.
+	 * @return a new array list of the given size, wrapping the given array.
+	 */
+
+	public static KEY_GENERIC ARRAY_LIST KEY_GENERIC wrap( final KEY_GENERIC_TYPE a[], final int length ) {
+		if ( length > a.length ) throw new IllegalArgumentException( "The specified length (" + length + ") is greater than the array size (" + a.length + ")" );
+		final ARRAY_LIST KEY_GENERIC l = new ARRAY_LIST KEY_GENERIC( a, false );
+		l.size = length;
+		return l;
+	}
+
+	/** Wraps a given array into an array list.
+	 *
+	 * @param a an array to wrap.
+	 * @return a new array list wrapping the given array.
+	 */
+
+	public static KEY_GENERIC ARRAY_LIST KEY_GENERIC wrap( final KEY_GENERIC_TYPE a[] ) {
+		return wrap( a, a.length );
+	}
+
+
+	/** Ensures that this array list can contain the given number of entries without resizing.
+	 *
+	 * @param capacity the new minimum capacity for this array list.
+	 */
+	@SuppressWarnings("unchecked")
+	public void ensureCapacity( final int capacity ) {
+#if #keys(primitive)
+		a = ARRAYS.ensureCapacity( a, capacity, size );
+#else
+		if ( wrapped ) a = ARRAYS.ensureCapacity( a, capacity, size );
+		else {
+			if ( capacity > a.length ) {
+				final Object t[] = new Object[ capacity ];
+				System.arraycopy( a, 0, t, 0, size );
+				a = (KEY_GENERIC_TYPE[])t;
+			}
+		}
+#endif
+		if ( ASSERTS ) assert size <= a.length;
+	}
+
+	/** Grows this array list, ensuring that it can contain the given number of entries without resizing,
+	 * and in case enlarging it at least by a factor of two.
+	 *
+	 * @param capacity the new minimum capacity for this array list.
+	 */
+	@SuppressWarnings("unchecked")
+	private void grow( final int capacity ) {
+#if #keys(primitive)
+		a = ARRAYS.grow( a, capacity, size );
+#else
+		if ( wrapped ) a = ARRAYS.grow( a, capacity, size );
+		else {
+			if ( capacity > a.length ) {
+				final int newLength = (int)Math.min( Math.max( 2L * a.length, capacity ), Arrays.MAX_ARRAY_SIZE );
+				final Object t[] = new Object[ newLength ];
+				System.arraycopy( a, 0, t, 0, size );
+				a = (KEY_GENERIC_TYPE[])t;
+			}			
+		}
+#endif
+		if ( ASSERTS ) assert size <= a.length;
+	}
+
+	public void add( final int index, final KEY_GENERIC_TYPE k ) {
+		ensureIndex( index );
+		grow( size + 1 );
+		if ( index != size ) System.arraycopy( a, index, a, index + 1, size - index );
+		a[ index ] = k;
+		size++;
+		if ( ASSERTS ) assert size <= a.length;
+	}
+
+	public boolean add( final KEY_GENERIC_TYPE k ) {
+		grow( size + 1 );
+		a[ size++ ] = k;
+		if ( ASSERTS ) assert size <= a.length;
+		return true;
+	}
+
+	public KEY_GENERIC_TYPE GET_KEY( final int index ) {
+		if ( index >= size ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than or equal to list size (" + size + ")" );
+		return a[ index ];
+	}
+
+	public int indexOf( final KEY_TYPE k ) {
+		for( int i = 0; i < size; i++ ) if ( KEY_EQUALS( k, a[ i ] ) ) return i;
+		return -1;
+	}
+
+
+	public int lastIndexOf( final KEY_TYPE k ) {
+		for( int i = size; i-- != 0; ) if ( KEY_EQUALS( k, a[ i ] ) ) return i;
+		return -1;
+	}
+
+	public KEY_GENERIC_TYPE REMOVE_KEY( final int index ) {
+		if ( index >= size ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than or equal to list size (" + size + ")" );
+		final KEY_GENERIC_TYPE old = a[ index ];
+		size--;
+		if ( index != size ) System.arraycopy( a, index + 1, a, index, size - index );
+#if #keys(reference)
+		a[ size ] = null;
+#endif
+		if ( ASSERTS ) assert size <= a.length;
+		return old;
+	}
+
+	public boolean rem( final KEY_TYPE k ) {
+		int index = indexOf( k );
+		if ( index == -1 ) return false;
+		REMOVE_KEY( index );
+		if ( ASSERTS ) assert size <= a.length;
+		return true;
+	}
+
+#if #keys(reference)
+	public boolean remove( final Object o ) {
+		return rem( o );
+	}
+#endif
+
+	public KEY_GENERIC_TYPE set( final int index, final KEY_GENERIC_TYPE k ) {
+		if ( index >= size ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than or equal to list size (" + size + ")" );
+		KEY_GENERIC_TYPE old = a[ index ];
+		a[ index ] = k;
+		return old;
+	}
+
+	public void clear() {
+#if #keys(reference)
+		ARRAYS.fill( a, 0, size, null );
+#endif
+		size = 0;
+		if ( ASSERTS ) assert size <= a.length;
+	}
+
+	public int size() {
+		return size;
+	}		
+
+	public void size( final int size ) {
+		if ( size > a.length ) ensureCapacity( size );
+		if ( size > this.size ) ARRAYS.fill( a, this.size, size, KEY_NULL );
+#if #keys(reference)
+		else ARRAYS.fill( a, size, this.size, KEY_NULL );
+#endif
+		this.size = size;
+	}		
+
+	public boolean isEmpty() {
+		return size == 0;
+	}		
+
+	/** Trims this array list so that the capacity is equal to the size. 
+	 *
+	 * @see java.util.ArrayList#trimToSize()
+	 */
+	public void trim() {
+		trim( 0 );
+	}
+
+	/** Trims the backing array if it is too large.
+	 * 
+	 * If the current array length is smaller than or equal to
+	 * <code>n</code>, this method does nothing. Otherwise, it trims the
+	 * array length to the maximum between <code>n</code> and {@link #size()}.
+	 *
+	 * <P>This method is useful when reusing lists.  {@linkplain #clear() Clearing a
+	 * list} leaves the array length untouched. If you are reusing a list
+	 * many times, you can call this method with a typical
+	 * size to avoid keeping around a very large array just
+	 * because of a few large transient lists.
+	 *
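	 * <p>A minimal reuse sketch (the <code>list</code> variable and the threshold value are
	 * hypothetical):
	 * <pre>
	 * list.clear();       // the (possibly very large) backing array is kept
	 * list.trim( 1024 );  // shrink it back to a typical working size
	 * </pre>
	 *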
+	 * @param n the threshold for the trimming.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public void trim( final int n ) {
+		// TODO: use Arrays.trim() and preserve type only if necessary
+		if ( n >= a.length || size == a.length ) return;
+		final KEY_GENERIC_TYPE t[] = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ Math.max( n, size )  ];
+		System.arraycopy( a, 0, t, 0, size );
+		a = t;
+		if ( ASSERTS ) assert size <= a.length;
+	}
+
+
+	/** Copies elements of this type-specific list into the given array using optimized system calls.
+	 *
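	 * <p>A minimal sketch (the <code>int</code> specialization is used purely for illustration,
	 * and <code>list</code> is assumed to contain at least five elements):
	 * <pre>
	 * int[] dst = new int[ 3 ];
	 * list.getElements( 2, dst, 0, 3 ); // copies the elements at positions 2, 3 and 4 into dst
	 * </pre>
	 *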
+	 * @param from the start index (inclusive).
+	 * @param a the destination array.
+	 * @param offset the offset into the destination array where to store the first element copied.
+	 * @param length the number of elements to be copied.
+	 */
+
+	public void getElements( final int from, final KEY_TYPE[] a, final int offset, final int length ) {
+		ARRAYS.ensureOffsetLength( a, offset, length );
+		System.arraycopy( this.a, from, a, offset, length );
+	}
+
+	/** Removes elements of this type-specific list using optimized system calls.
+	 *
+	 * @param from the start index (inclusive).
+	 * @param to the end index (exclusive).
+	 */
+	public void removeElements( final int from, final int to ) {
+		Arrays.ensureFromTo( size, from, to );
+		System.arraycopy( a, to, a, from, size - to );
+		size -= ( to - from );
+#if #keys(reference)
+		int i = to - from;
+		while( i-- != 0 ) a[ size + i ] = null;
+#endif
+	}
+	
+
+	/** Adds elements to this type-specific list using optimized system calls.
+	 *
+	 * @param index the index at which to add elements.
+	 * @param a the array containing the elements.
+	 * @param offset the offset of the first element to add.
+	 * @param length the number of elements to add.
+	 */
+	public void addElements( final int index, final KEY_GENERIC_TYPE a[], final int offset, final int length ) {
+		ensureIndex( index );
+		ARRAYS.ensureOffsetLength( a, offset, length );
+		grow( size + length );
+		System.arraycopy( this.a, index, this.a, index + length, size - index );
+		System.arraycopy( a, offset, this.a, index, length );
+		size += length;
+	}
+
+
+
+#if #keys(primitive)
+	
+	public KEY_TYPE[] TO_KEY_ARRAY( KEY_TYPE a[] ) {
+		if ( a == null || a.length < size ) a = new KEY_TYPE[ size ];
+		System.arraycopy( this.a, 0, a, 0, size );
+		return a;
+	}
+
+	public boolean addAll( int index, final COLLECTION c ) {
+		ensureIndex( index );
+		int n = c.size();
+		if ( n == 0 ) return false;
+		grow( size + n );
+		if ( index != size ) System.arraycopy( a, index, a, index + n, size - index );
+		final KEY_ITERATOR i = c.iterator();
+		size += n;
+		while( n-- != 0 ) a[ index++ ] = i.NEXT_KEY();
+		if ( ASSERTS ) assert size <= a.length;
+		return true;
+	}
+
+	public boolean addAll( final int index, final LIST l ) {
+		ensureIndex( index );
+		final int n = l.size();
+		if ( n == 0 ) return false;
+		grow( size + n );
+		if ( index != size ) System.arraycopy( a, index, a, index + n, size - index );
+		l.getElements( 0, a, index, n );
+		size += n;
+		if ( ASSERTS ) assert size <= a.length;
+		return true;
+	}
+
+#endif
+
+	public KEY_LIST_ITERATOR KEY_GENERIC listIterator( final int index ) {
+		ensureIndex( index );
+
+		return new KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC() {
+				int pos = index, last = -1;
+
+				public boolean hasNext() { return pos < size; }
+				public boolean hasPrevious() { return pos > 0; }
+				public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); return a[ last = pos++ ]; }
+				public KEY_GENERIC_TYPE PREV_KEY() { if ( ! hasPrevious() ) throw new NoSuchElementException(); return a[ last = --pos ]; }
+				public int nextIndex() { return pos; }
+				public int previousIndex() { return pos - 1; }
+				public void add( KEY_GENERIC_TYPE k ) { 
+					if ( last == -1 ) throw new IllegalStateException();
+					ARRAY_LIST.this.add( pos++, k ); 
+					last = -1;
+				}
+				public void set( KEY_GENERIC_TYPE k ) { 
+					if ( last == -1 ) throw new IllegalStateException();
+					ARRAY_LIST.this.set( last, k );
+				}
+				public void remove() { 
+					if ( last == -1 ) throw new IllegalStateException();
+					ARRAY_LIST.this.REMOVE_KEY( last ); 
+					/* If the last operation was a next(), we are removing an element *before* us, and we must decrease pos correspondingly. */
+					if ( last < pos ) pos--;
+					last = -1;
+				}
+			};
+	}
+
+
+	@SuppressWarnings("unchecked")
+	public ARRAY_LIST KEY_GENERIC clone() {
+		ARRAY_LIST KEY_GENERIC c = new ARRAY_LIST KEY_GENERIC( size );
+		System.arraycopy( a, 0, c.a, 0, size );
+		c.size = size;
+		return c;
+	}
+
+#if #keyclass(Object)
+	private boolean valEquals( final K a, final K b ) {
+		return a == null ? b == null : a.equals( b );
+	}
+#endif
+
+    /** Compares this type-specific array list to another one.
+	 *
+	 * <P>This method exists only for the sake of efficiency. The implementation
+	 * inherited from the abstract implementation would already work.
+	 *
+	 * @param l a type-specific array list.
     * @return true if the argument contains the same elements as this type-specific array list.
+	 */
+	public boolean equals( final ARRAY_LIST KEY_GENERIC l ) {
+		if ( l == this ) return true;
+		int s = size();
+		if ( s != l.size() ) return false;
+		final KEY_GENERIC_TYPE[] a1 = a;
+		final KEY_GENERIC_TYPE[] a2 = l.a;
+
+#if #keyclass(Object)
+		while( s-- !=  0 ) if ( ! valEquals( a1[ s ], a2[ s ] ) ) return false;
+#else
+		while( s-- !=  0 ) if ( a1[ s ] != a2[ s ] ) return false;
+#endif
+		return true;
+	}
+
+
+#if ! #keyclass(Reference)
+
+    /** Compares this array list to another array list.
+     *
+	 * <P>This method exists only for the sake of efficiency. The implementation
+	 * inherited from the abstract implementation would already work.
+	 *
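     * <p>For example, with the <code>int</code> specialization (the class name is used purely
     * for illustration):
     * <pre>
     * IntArrayList a = new IntArrayList( new int[] { 1, 2 } );
     * a.compareTo( new IntArrayList( new int[] { 1, 3 } ) );    // negative: 2 &lt; 3
     * a.compareTo( new IntArrayList( new int[] { 1, 2, 0 } ) ); // negative: a is a proper prefix
     * </pre>
     *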
+     * @param l an array list.
+     * @return a negative integer,
+     * zero, or a positive integer as this list is lexicographically less than, equal
+     * to, or greater than the argument.
+     */
+	@SuppressWarnings("unchecked")
+	public int compareTo( final ARRAY_LIST KEY_EXTENDS_GENERIC l ) {
+		final int s1 = size(), s2 = l.size();
+		final KEY_GENERIC_TYPE a1[] = a, a2[] = l.a;
+		KEY_GENERIC_TYPE e1, e2;
+		int r, i;
+		
+		for( i = 0; i < s1 && i < s2; i++ ) {
+			e1 = a1[ i ];
+			e2 = a2[ i ];
+			if ( ( r = KEY_CMP( e1, e2 ) ) != 0 ) return r;
+		}
+
+		return i < s2 ? -1 : ( i < s1 ? 1 : 0 );
+	}
+#endif
+
+
+	private void writeObject( java.io.ObjectOutputStream s ) throws java.io.IOException {
+		s.defaultWriteObject();
+		for( int i = 0; i < size; i++ ) s.WRITE_KEY( a[ i ] );
+	}
+
+	@SuppressWarnings("unchecked")
+	private void readObject( java.io.ObjectInputStream s ) throws java.io.IOException, ClassNotFoundException {
+		s.defaultReadObject();
+		a = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ size ];
+		for( int i = 0; i < size; i++ ) a[ i ] = KEY_GENERIC_CAST s.READ_KEY();
+	}
+
+
+#ifdef TEST
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif #keyclass(Object)
+		return Integer.toBinaryString( r.nextInt() );
+#else 
+		return new java.io.Serializable() {};
+#endif
+	}
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition p = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, p ).toString();
+	}
+
+	private static void speedTest( int n, boolean comp ) {
+		System.out.println( "There are presently no speed tests for this class." );
+	}
+
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	private static Object[] k, v, nk;
+	private static KEY_TYPE kt[];
+	private static KEY_TYPE nkt[];
+	private static ARRAY_LIST topList;
+
+	protected static void testLists( LIST m, java.util.List t, int n, int level ) {
+		long ms;
+		Exception mThrowsIllegal, tThrowsIllegal, mThrowsOutOfBounds, tThrowsOutOfBounds;
+		Object rt = null;
+		KEY_TYPE rm = KEY_NULL;
+
+		if ( level > 4 ) return;
+				
+
+		/* Now we check that both sets agree on random keys. For m we use the polymorphic method. */
+
+		for( int i = 0; i < n; i++ ) {
+			int p = r.nextInt() % ( n * 2 );
+
+			KEY_TYPE T = genKey();
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+			
+			try {
+				m.set( p, T );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			try {
+				t.set( p, KEY2OBJ( T ) );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): set() divergence at start in IndexOutOfBoundsException for index " + p + "  (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( t.get( p ).equals( KEY2OBJ( m.GET_KEY( p ) ) ), "Error (" + level + ", " + seed + "): m and t differ after set() on position " + p + " (" + m.GET_KEY( p ) + ", " + t.get( p ) + ")" );
+
+			p = r.nextInt() % ( n * 2 );
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+			
+			try {
+				m.GET_KEY( p );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			try {
+				t.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): get() divergence at start in IndexOutOfBoundsException for index " + p + "  (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( t.get( p ).equals( KEY2OBJ( m.GET_KEY( p ) ) ), "Error (" + level + ", " + seed + "): m and t differ after get() on position " + p + " (" + m.GET_KEY( p ) + ", " + t.get( p ) + ")" );
+			
+		}
+		
+		/* Now we check that both sets agree on random keys. For m we use the standard method. */
+
+		for( int i = 0; i < n; i++ ) {
+			int p = r.nextInt() % ( n * 2 );
+
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+			
+			try {
+				m.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			try {
+				t.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): get() divergence at start in IndexOutOfBoundsException for index " + p + "  (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( t.get( p ).equals( m.get( p ) ), "Error (" + level + ", " + seed + "): m and t differ at start on position " + p + " (" + m.get( p ) + ", " + t.get( p ) + ")" );
+			
+		}
+		
+		/* Now we check that m and t are equal. */
+		if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t);
+		
+		ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" );
+		ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" );
+
+			
+
+		/* Now we check that m actually holds that data. */
+		for(Iterator i=t.iterator(); i.hasNext();  ) {
+			ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on t)" );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for(Iterator i=m.listIterator(); i.hasNext();  ) {
+			ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on m)" );
+		}
+
+		/* Now we check that inquiries about random data give the same answer in m and t. For
+		   m we use the polymorphic method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+			ensure( m.contains(T) == t.contains(KEY2OBJ(T)), "Error (" + level + ", " + seed + "): divergence in content between t and m (polymorphic method)" );
+		}
+
+		/* Again, we check that inquiries about random data give the same answer in m and t, but
+		   for m we use the standard method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+			ensure( m.contains(KEY2OBJ(T)) == t.contains(KEY2OBJ(T)), "Error (" + level + ", " + seed + "): divergence in content between t and m (standard method)" );
+		}
+
+		/* Now we add and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<2*n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			try {
+				m.add( T );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+
+			try {
+				t.add( KEY2OBJ( T ) );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+
+			T = genKey();
+			int p = r.nextInt() % ( 2 * n + 1 );
+
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+
+			try {
+				m.add(p, T );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+
+			try {
+				t.add(p, KEY2OBJ(T));
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): add() divergence in IndexOutOfBoundsException for index " + p + " for " + T + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+
+			p = r.nextInt() % ( 2 * n + 1 );
+
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+
+			try {
+				rm = m.REMOVE_KEY(p);
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+
+			try {
+				rt = t.remove(p);
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): remove() divergence in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( rt.equals( KEY2OBJ( rm ) ), "Error (" + level + ", " + seed + "): divergence in remove() between t and m (" + rt + ", " + rm + ")" );
+		}
+
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after add/remove" );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after add/remove" );
+
+		/* Now we add random data in m and t using addAll on a collection, checking that the result is the same. */
+
+		for(int i=0; i<n;  i++ ) {
+			int p = r.nextInt() % ( 2 * n + 1 );
+			Collection m1 = new java.util.ArrayList();
+			int s = r.nextInt( n / 2 + 1 );
+			for( int j = 0; j < s; j++ ) m1.add( KEY2OBJ( genKey() ) );
+
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+
+			try {
+				m.addAll(p, m1);
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+
+			try {
+				t.addAll(p, m1);
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): addAll() divergence in IndexOutOfBoundsException for index " + p + " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + m + t + "): ! m.equals( t ) after addAll" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + m + t + "): ! t.equals( m ) after addAll" );
+		}
+
+		if ( m.size() > n ) {
+			m.size( n );
+			while( t.size() != n ) t.remove( t.size() -1 );
+		}
+
+		/* Now we add random data in m and t using addAll on a type-specific collection, checking that the result is the same. */
+
+		for(int i=0; i<n;  i++ ) {
+			int p = r.nextInt() % ( 2 * n + 1 );
+			COLLECTION m1 = new ARRAY_LIST();
+			Collection t1 = new java.util.ArrayList();
+			int s = r.nextInt( n / 2 + 1 );
+			for( int j = 0; j < s; j++ ) {
+				KEY_TYPE x = genKey();
+				m1.add( x );
+				t1.add( KEY2OBJ( x ) );
+			}
+
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+
+			try {
+				m.addAll(p, m1);
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+
+			try {
+				t.addAll(p, t1);
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): polymorphic addAll() divergence in IndexOutOfBoundsException for index " + p + " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + m + t + "): ! m.equals( t ) after polymorphic addAll" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + m + t + "): ! t.equals( m ) after polymorphic addAll" );
+		}
+
+		if ( m.size() > n ) {
+			m.size( n );
+			while( t.size() != n ) t.remove( t.size() -1 );
+		}
+
+		/* Now we add random data in m and t using addAll on a list, checking that the result is the same. */
+
+		for(int i=0; i<n;  i++ ) {
+			int p = r.nextInt() % ( 2 * n + 1 );
+			LIST m1 = new ARRAY_LIST();
+			Collection t1 = new java.util.ArrayList();
+			int s = r.nextInt( n / 2 + 1 );
+			for( int j = 0; j < s; j++ ) {
+				KEY_TYPE x = genKey();
+				m1.add( x );
+				t1.add( KEY2OBJ( x ) );
+			}
+
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+
+			try {
+				m.addAll(p, m1);
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+
+			try {
+				t.addAll(p, t1);
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): list addAll() divergence in IndexOutOfBoundsException for index " + p + " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after list addAll" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after list addAll" );
+		}
+
+		/* Now we add random data in m and t using addElements, checking that the result is the same. */
+
+		for(int i=0; i<n;  i++ ) {
+			int p = r.nextInt() % ( 2 * n + 1 );
+			Collection t1 = new java.util.ArrayList();
+			int s = r.nextInt( n / 2 + 1 );
+			KEY_TYPE a[] = new KEY_TYPE [ s ];
+			for( int j = 0; j < s; j++ ) {
+				KEY_TYPE x = genKey();
+				t1.add( KEY2OBJ( x ) );
+				a[ j ] = x;
+			}
+
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+
+			try {
+				m.addElements(p, a);
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+
+			try {
+				t.addAll(p, t1);
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): list addElements() divergence in IndexOutOfBoundsException for index " + p + " for " + t1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after list addElements" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after list addElements" );
+		}
+
+		if ( m.size() > n ) {
+			m.size( n );
+			while( t.size() != n ) t.remove( t.size() -1 );
+		}
+
+		/* Now we check that m actually holds the same data. */
+		  
+		for(Iterator i=t.iterator(); i.hasNext();  ) {
+			ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after removal (iterating on t)");
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(Iterator i=m.listIterator(); i.hasNext();  ) {
+			ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after removal (iterating on m)" );
+		}
+
+		/* Now we check that both sets agree on random keys. For m we use the standard method. */
+
+		for( int i = 0; i < n; i++ ) {
+			int p = r.nextInt() % ( n * 2 );
+
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+			
+			try {
+				m.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			try {
+				t.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): get() divergence in IndexOutOfBoundsException for index " + p + "  (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( t.get( p ).equals( m.get( p ) ), "Error (" + level + ", " + seed + "): m and t differ on position " + p + " (" + m.get( p ) + ", " + t.get( p ) +")" );
+			
+		}
+
+		/* Now we inquiry about the content with indexOf()/lastIndexOf(). */
+
+		for(int i=0; i<10*n;  i++ ) {
+			KEY_TYPE T = genKey();
+			ensure( m.indexOf( KEY2OBJ( T ) ) == t.indexOf( KEY2OBJ( T ) ),
+					"Error (" + level + ", " + seed + "): indexOf() divergence for " + T + "  (" + m.indexOf( KEY2OBJ( T ) ) + ", " + t.indexOf( KEY2OBJ( T ) ) + ")" );
+			ensure( m.lastIndexOf( KEY2OBJ( T ) ) == t.lastIndexOf( KEY2OBJ( T ) ),
+					"Error (" + level + ", " + seed + "): lastIndexOf() divergence for " + T + "  (" + m.lastIndexOf( KEY2OBJ( T ) ) + ", " + t.lastIndexOf( KEY2OBJ( T ) ) + ")" );
+			ensure( m.indexOf( T ) == t.indexOf( KEY2OBJ( T ) ),
+					"Error (" + level + ", " + seed + "): polymorphic indexOf() divergence for " + T + "  (" + m.indexOf( T ) + ", " + t.indexOf( KEY2OBJ( T ) ) + ")" );
+			ensure( m.lastIndexOf( T ) == t.lastIndexOf( KEY2OBJ( T ) ),
+					"Error (" + level + ", " + seed + "): polymorphic lastIndexOf() divergence for " + T + "  (" + m.lastIndexOf( T ) + ", " + t.lastIndexOf( KEY2OBJ( T ) ) + ")" );
+		}
+
+		/* Now we check cloning. */
+
+		if ( level == 0 ) {
+			ensure( m.equals( ((ARRAY_LIST)m).clone() ), "Error (" + level + ", " + seed + "): m does not equal m.clone()" );
+			ensure( ((ARRAY_LIST)m).clone().equals( m ), "Error (" + level + ", " + seed + "): m.clone() does not equal m" );
+		}
+
+		/* Now we play with constructors. */
+		ensure( m.equals( new ARRAY_LIST( (Collection)m ) ), "Error (" + level + ", " + seed + "): m does not equal new ( Collection m )" );
+		ensure( ( new ARRAY_LIST( (Collection)m ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( Collection m ) does not equal m" );
+		ensure( m.equals( new ARRAY_LIST( (COLLECTION)m ) ), "Error (" + level + ", " + seed + "): m does not equal new ( type-specific Collection m )" );
+		ensure( ( new ARRAY_LIST( (COLLECTION)m ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( type-specific Collection m ) does not equal m" );
+		ensure( m.equals( new ARRAY_LIST( (LIST)m ) ), "Error (" + level + ", " + seed + "): m does not equal new ( type-specific List m )" );
+		ensure( ( new ARRAY_LIST( (LIST)m ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( type-specific List m ) does not equal m" );
+		ensure( m.equals( new ARRAY_LIST( m.listIterator() ) ), "Error (" + level + ", " + seed + "): m does not equal new ( m.listIterator() )" );
+		ensure( ( new ARRAY_LIST( m.listIterator() ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( m.listIterator() ) does not equal m" );
+		ensure( m.equals( new ARRAY_LIST( m.iterator() ) ), "Error (" + level + ", " + seed + "): m does not equal new ( m.type_specific_iterator() )" );
+		ensure( ( new ARRAY_LIST( m.iterator() ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( m.type_specific_iterator() ) does not equal m" );
+
+		/* Now we play with conversion to array, wrapping and copying. */
+		ensure( m.equals( new ARRAY_LIST( m.TO_KEY_ARRAY() ) ), "Error (" + level + ", " + seed + "): m does not equal new ( toArray( m ) )" );
+		ensure( ( new ARRAY_LIST( m.TO_KEY_ARRAY() ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( toArray( m ) ) does not equal m" );
+		ensure( m.equals( wrap( m.TO_KEY_ARRAY() ) ), "Error (" + level + ", " + seed + "): m does not equal wrap ( toArray( m ) )" );
+		ensure( ( wrap( m.TO_KEY_ARRAY() ) ).equals( m ), "Error (" + level + ", " + seed + "): wrap ( toArray( m ) ) does not equal m" );
+		
+
+		int h = m.hashCode();
+
+		/* Now we save and read m. */
+
+		LIST m2 = null;
+		  
+		try {
+			java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test");
+			java.io.OutputStream os = new java.io.FileOutputStream(ff);
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os);
+				
+			oos.writeObject(m);
+			oos.close();
+				
+			java.io.InputStream is = new java.io.FileInputStream(ff);
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is);
+				
+			m2 = (LIST)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch(Exception e) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+
+#if ! #keyclass(Reference)
+		ensure( m2.hashCode() == h, "Error (" + level + ", " + seed + "): hashCode() changed after save/read" );
+
+		/* Now we check that m2 actually holds that data. */
+		  
+		ensure( m2.equals(t), "Error (" + level + ", " + seed + "): ! m2.equals( t ) after save/read" );
+		ensure( t.equals(m2), "Error (" + level + ", " + seed + "): ! t.equals( m2 ) after save/read" );
+		/* Now we take out of m everything, and check that it is empty. */
+
+		for(Iterator i=t.iterator(); i.hasNext(); ) m2.remove(i.next());
+
+		ensure( m2.isEmpty(), "Error (" + level + ", " + seed + "): m2 is not empty (as it should be)" );
+#endif		  
+				 
+		/* Now we play with iterators. */
+
+		{
+			KEY_LIST_ITERATOR i;
+			java.util.ListIterator j;
+			Object J;
+			i = m.listIterator(); 
+			j = t.listIterator(); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext()" );
+				ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + level + ", " + seed + "): divergence in hasPrevious()" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					ensure( i.next().equals( J = j.next() ), "Error (" + level + ", " + seed + "): divergence in next()" );
+
+					if ( r.nextFloat() < 0.2 ) {
+						i.remove();
+						j.remove();
+					} 
+					else if ( r.nextFloat() < 0.2 ) {
+						KEY_TYPE T = genKey();
+						i.set( T );
+						j.set( KEY2OBJ( T ) );
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						KEY_TYPE T = genKey();
+						i.add( T );
+						j.add( KEY2OBJ( T ) );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					ensure( i.previous().equals( J = j.previous() ), "Error (" + level + ", " + seed + "): divergence in previous()" );
+
+					if ( r.nextFloat() < 0.2 ) {
+						i.remove();
+						j.remove();
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						KEY_TYPE T = genKey();
+						i.set( T );
+						j.set( KEY2OBJ( T ) );
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						KEY_TYPE T = genKey();
+						i.add( T );
+						j.add( KEY2OBJ( T ) );
+					}
+				}
+
+				ensure( i.nextIndex() == j.nextIndex(), "Error (" + level + ", " + seed + "): divergence in nextIndex()" );
+				ensure( i.previousIndex() == j.previousIndex(), "Error (" + level + ", " + seed + "): divergence in previousIndex()" );
+
+			}
+
+		}
+
+		{
+			Object previous = null;
+			Object I, J;
+			int from = r.nextInt( m.size() +1 );
+			KEY_LIST_ITERATOR i;
+			java.util.ListIterator j;
+			i = m.listIterator( from ); 
+			j = t.listIterator( from ); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" );
+				ensure( i.hasPrevious() == j.hasPrevious() , "Error (" + level + ", " + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					ensure( ( I = i.next() ).equals( J = j.next() ), "Error (" + level + ", " + seed + "): divergence in next() (" + I + ", " + J + ", iterator with starting point " + from + ")" );
+					//System.err.println("Done next " + I + " " + J + "  " + badPrevious);
+
+					if ( r.nextFloat() < 0.2 ) {
+						//System.err.println("Removing in next");
+						i.remove();
+						j.remove();
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						KEY_TYPE T = genKey();
+						i.set( T );
+						j.set( KEY2OBJ( T ) );
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						KEY_TYPE T = genKey();
+						i.add( T );
+						j.add( KEY2OBJ( T ) );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					ensure( ( I = i.previous() ).equals( J = j.previous() ), "Error (" + level + ", " + seed + "): divergence in previous() (" + I + ", " + J + ", iterator with starting point " + from + ")" );
+
+					if ( r.nextFloat() < 0.2 ) {
+						//System.err.println("Removing in prev");
+						i.remove();
+						j.remove();
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						KEY_TYPE T = genKey();
+						i.set( T );
+						j.set( KEY2OBJ( T ) );
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						KEY_TYPE T = genKey();
+						i.add( T );
+						j.add( KEY2OBJ( T ) );
+					}
+				}
+			}
+
+		}
+
+		/* Now we check that m actually holds that data. */
+		  
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after iteration" );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after iteration" );
+
+		/* Now we select a pair of keys and create a subset. */
+
+		if ( ! m.isEmpty() ) {
+			int start = r.nextInt( m.size() );
+			int end = start + r.nextInt( m.size() - start );
+			//System.err.println("Checking subList from " + start + " to " + end + " (level=" + (level+1) + ")..." );
+			testLists( m.subList( start, end ), t.subList( start, end ), n, level + 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + m + t + "): ! m.equals( t ) after subList" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after subList" );
+
+		}
+
+		m.clear();
+		t.clear();
+		ensure( m.isEmpty(), "Error (" + level + ", " + seed + "): m is not empty after clear()" );
+	}
+
+
+	protected static void test( int n ) {
+		ARRAY_LIST m = new ARRAY_LIST();
+		java.util.ArrayList t = new java.util.ArrayList();
+		topList = m;
+		k = new Object[n];
+		nk = new Object[n];
+		kt = new KEY_TYPE[n];
+		nkt = new KEY_TYPE[n];
+
+		for( int i = 0; i < n; i++ ) {
+#if #keys(reference)
+			k[i] = kt[i] = genKey();
+			nk[i] = nkt[i] = genKey();
+#else
+			k[i] = new KEY_CLASS( kt[i] = genKey() );
+			nk[i] = new KEY_CLASS( nkt[i] = genKey() );
+#endif
+		}
+		  
+		/* We add pairs to t. */
+		for( int i = 0; i < n;  i++ ) t.add( k[i] );
+		  
+		/* We add to m the same data */
+		m.addAll(t);
+
+		testLists( m, t, n, 0 );
+
+		System.out.println("Test OK");
+		return;
+	}
+
+
+	public static void main( String args[] ) {
+		int n  = Integer.parseInt(args[1]);
+		if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) );
+		  
+
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+
+#endif
+
+}
diff --git a/drv/ArrayMap.drv b/drv/ArrayMap.drv
new file mode 100644
index 0000000..dc8a918
--- /dev/null
+++ b/drv/ArrayMap.drv
@@ -0,0 +1,326 @@
+/*		 
+ * Copyright (C) 2007-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.Map;
+import java.util.NoSuchElementException;
+import it.unimi.dsi.fastutil.objects.AbstractObjectIterator;
+import it.unimi.dsi.fastutil.objects.AbstractObjectSet;
+import it.unimi.dsi.fastutil.objects.ObjectIterator;
+
+import VALUE_PACKAGE.VALUE_COLLECTION;
+import VALUE_PACKAGE.VALUE_COLLECTIONS;
+import VALUE_PACKAGE.VALUE_ARRAY_SET;
+import VALUE_PACKAGE.VALUE_ARRAYS;
+
+/** A simple, brute-force implementation of a map based on two parallel backing arrays. 
+ * 
+ * <p>The main purpose of this implementation is to wrap cleanly the brute-force
+ * approach to storing a very small number of pairs: just put them into two parallel
+ * arrays and scan them linearly to find an item.
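 *
 * <p>A minimal sketch of this layout; the <code>int</code>-to-<code>int</code> specialization
 * <code>Int2IntArrayMap</code> is used here purely for illustration:
 * <pre>
 * Int2IntArrayMap m = new Int2IntArrayMap( new int[] { 1, 2 }, new int[] { 10, 20 } );
 * m.put( 3, 30 ); // appended at the end of both backing arrays (which are enlarged if needed)
 * m.get( 2 );     // returns 20 after a linear scan of the key array
 * </pre>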
+ */
+
+public class ARRAY_MAP KEY_VALUE_GENERIC extends ABSTRACT_MAP KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable {
+
+	private static final long serialVersionUID = 1L;
+	/** The keys (valid up to {@link #size}, excluded). */
+	private transient KEY_TYPE[] key;
+	/** The values (parallel to {@link #key}). */
+	private transient VALUE_TYPE[] value;
+	/** The number of valid entries in {@link #key} and {@link #value}. */
+	private int size;
+	
+	/** Creates a new array map with given key and value backing arrays. The resulting map will have as many entries as the given arrays.
+	 * 
+	 * <p>It is the responsibility of the caller to ensure that the elements of <code>key</code> are distinct.
+	 * 
+	 * @param key the key array.
+	 * @param value the value array (it <em>must</em> have the same length as <code>key</code>).
+	 */
+	public ARRAY_MAP( final KEY_TYPE[] key, final VALUE_TYPE[] value ) {
+		this.key = key;
+		this.value = value;
+		size = key.length;
+		if( key.length != value.length ) throw new IllegalArgumentException( "Keys and values have different lengths (" + key.length + ", " + value.length + ")" );
+	}
+
+	/** Creates a new empty array map.
+	 */
+	public ARRAY_MAP() {
+		this.key = ARRAYS.EMPTY_ARRAY;
+		this.value = VALUE_ARRAYS.EMPTY_ARRAY;
+	}
+	
+	/** Creates a new empty array map of given capacity.
+	 *
+	 * @param capacity the initial capacity.
+	 */ 
+	public ARRAY_MAP( final int capacity ) {
+		this.key = new KEY_TYPE[ capacity ];
+		this.value = new VALUE_TYPE[ capacity ];
+	}
+	
+	/** Creates a new array map copying the entries of a given map.
+	 *
+	 * @param m a map.
+	 */ 
+	public ARRAY_MAP( final MAP KEY_VALUE_GENERIC m ) {
+		this( m.size() );
+		putAll( m );
+	}
+	
+	/** Creates a new array map copying the entries of a given map.
+	 *
+	 * @param m a map.
+	 */ 
+	public ARRAY_MAP( final Map<? extends KEY_GENERIC_CLASS, ? extends VALUE_GENERIC_CLASS> m ) {
+		this( m.size() );
+		putAll( m );
+	}
+	
+	/** Creates a new array map with given key and value backing arrays, using the given number of elements.
+	 * 
+	 * <p>It is the responsibility of the caller to ensure that the first <code>size</code> elements of <code>key</code> are distinct.
+	 * 
+	 * @param key the key array.
+	 * @param value the value array (it <em>must</em> have the same length as <code>key</code>).
+	 * @param size the number of valid elements in <code>key</code> and <code>value</code>.
+	 */
+	public ARRAY_MAP( final KEY_TYPE[] key, final VALUE_TYPE[] value, final int size ) {
+		this.key = key;
+		this.value = value;
+		this.size = size;
+		if( key.length != value.length ) throw new IllegalArgumentException( "Keys and values have different lengths (" + key.length + ", " + value.length + ")" );
+		if ( size > key.length ) throw new IllegalArgumentException( "The provided size (" + size + ") is larger than the backing-arrays size (" + key.length + ")" );
+	}
+
+	private final class EntrySet extends AbstractObjectSet<MAP.Entry KEY_VALUE_GENERIC> implements FastEntrySet KEY_VALUE_GENERIC {
+
+		@Override
+		public ObjectIterator<MAP.Entry KEY_VALUE_GENERIC> iterator() {
+			return new AbstractObjectIterator<MAP.Entry KEY_VALUE_GENERIC>() {
+				int next = 0;
+				
+				public boolean hasNext() {
+					return next < size;
+				}
+
+				@SuppressWarnings("unchecked")
+				public Entry KEY_VALUE_GENERIC next() {
+					if ( ! hasNext() ) throw new NoSuchElementException();
+					return new ABSTRACT_MAP.BasicEntry KEY_VALUE_GENERIC( KEY_GENERIC_CAST key[ next ], VALUE_GENERIC_CAST value[ next++ ] );
+				}
+				
+			};
+		}
+
+		public ObjectIterator<MAP.Entry KEY_VALUE_GENERIC> fastIterator() {
+			return new AbstractObjectIterator<MAP.Entry KEY_VALUE_GENERIC>() {
+				int next = 0;
+				final BasicEntry KEY_VALUE_GENERIC entry = new BasicEntry KEY_VALUE_GENERIC ( KEY_NULL, VALUE_NULL );
+				
+				public boolean hasNext() {
+					return next < size;
+				}
+
+				@SuppressWarnings("unchecked")
+				public Entry KEY_VALUE_GENERIC next() {
+					if ( ! hasNext() ) throw new NoSuchElementException();
+					entry.key = KEY_GENERIC_CAST key[ next ];
+					entry.value = VALUE_GENERIC_CAST value[ next++ ];
+					return entry;
+				}
+				
+			};
+		}
+
+		public int size() {
+			return size;
+		}
+
+		@SuppressWarnings("unchecked")
+		public boolean contains( Object o ) {
+			if ( ! ( o instanceof Map.Entry ) ) return false;
+			final Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> e = (Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>)o;
+			final KEY_GENERIC_TYPE k = KEY_CLASS2TYPE( e.getKey() );
+			return ARRAY_MAP.this.containsKey( k ) && VALUE_EQUALS( ARRAY_MAP.this.GET_VALUE( k ), VALUE_CLASS2TYPE( e.getValue() ) );
+		}
+		
+	}
+
+	public FastEntrySet KEY_VALUE_GENERIC ENTRYSET() {
+		return new EntrySet();
+	}
+
+	private int findKey( final KEY_TYPE k ) {
+		final KEY_TYPE[] key = this.key;
+		for( int i = size; i-- != 0; ) if ( KEY_EQUALS( key[ i ], k ) ) return i;
+		return -1;
+	}
+
+	@SuppressWarnings("unchecked")
+#if #keys(primitive) || #values(primitive)
+	public VALUE_GENERIC_TYPE GET_VALUE( final KEY_TYPE k ) {
+#else
+	public VALUE_GENERIC_TYPE get( final Object k ) {
+#endif
+		final KEY_TYPE[] key = this.key;
+		for( int i = size; i-- != 0; ) if ( KEY_EQUALS( key[ i ], k ) ) return VALUE_GENERIC_CAST value[ i ];
+		return defRetValue;
+	}
+
+	public int size() {
+		return size;
+	}
+
+	@Override
+	public void clear() {
+#if #keys(reference) || #values(reference)
+		for( int i = size; i-- != 0; ) {
+#if #keys(reference)
+			key[ i ] = null;
+#endif
+#if #values(reference)
+			value[ i ] = null;
+#endif
+		}
+#endif
+		size = 0;
+	}
+
+	@Override
+	public boolean containsKey( final KEY_TYPE k ) {
+		return findKey( k ) != -1;
+	}
+
+	@Override
+	@SuppressWarnings("unchecked")
+	public boolean containsValue( VALUE_TYPE v ) {
+		for( int i = size; i-- != 0; ) if ( VALUE_EQUALS( value[ i ], v ) ) return true;
+		return false;
+	}
+
+	@Override
+	public boolean isEmpty() {
+		return size == 0;
+	}
+
+	@Override
+	@SuppressWarnings("unchecked")
+	public VALUE_GENERIC_TYPE put( KEY_GENERIC_TYPE k, VALUE_GENERIC_TYPE v ) {
+		final int oldKey = findKey( k );
+		if ( oldKey != -1 ) {
+			final VALUE_GENERIC_TYPE oldValue = VALUE_GENERIC_CAST value[ oldKey ];
+			value[ oldKey ] = v;
+			return oldValue;
+		}
+		if ( size == key.length ) {
+			final KEY_TYPE[] newKey = new KEY_TYPE[ size == 0 ? 2 : size * 2 ];
+			final VALUE_TYPE[] newValue = new VALUE_TYPE[ size == 0 ? 2 : size * 2 ];
+			for( int i = size; i-- != 0; ) {
+				newKey[ i ] = key[ i ];
+				newValue[ i ] = value[ i ];
+			}
+			key = newKey;
+			value = newValue;
+		}
+		key[ size ] = k;
+		value[ size ] = v;
+		size++;
+		return defRetValue;
+	}
+
+	@Override
+	@SuppressWarnings("unchecked")
+#if #keys(primitive) || #values(primitive)
+	public VALUE_GENERIC_TYPE REMOVE_VALUE( final KEY_TYPE k ) {
+#else
+	public VALUE_GENERIC_TYPE remove( final Object k ) {
+#endif
+		final int oldPos = findKey( k );
+		if ( oldPos == -1 ) return defRetValue;
+		final VALUE_GENERIC_TYPE oldValue = VALUE_GENERIC_CAST value[ oldPos ];
+		final int tail = size - oldPos - 1;
+		for( int i = 0; i < tail; i++ ) {
+			key[ oldPos + i ] = key[ oldPos + i + 1 ];
+			value[ oldPos + i ] = value[ oldPos + i + 1 ];
+		}
+		size--;
+#if #keys(reference)
+		key[ size ] = null;
+#endif
+#if #values(reference)
+		value[ size ] = null;
+#endif
+		return oldValue;
+	}
+
+	@Override
+	@SuppressWarnings("unchecked")
+	public SET KEY_GENERIC keySet() {
+		return new ARRAY_SET KEY_GENERIC( key, size );
+	}
+
+	@Override
+	public VALUE_COLLECTION VALUE_GENERIC values() {
+		return VALUE_COLLECTIONS.unmodifiable( new VALUE_ARRAY_SET VALUE_GENERIC( value, size ) );
+	}
+
+	/** Returns a deep copy of this map. 
+	 *
+	 * <P>This method performs a deep copy of this array map; the data stored in the
+	 * map, however, is not cloned. Note that this makes a difference only for object keys.
+	 *
+	 *  @return a deep copy of this map.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public ARRAY_MAP KEY_VALUE_GENERIC clone() {
+		ARRAY_MAP KEY_VALUE_GENERIC c;
+		try {
+			c = (ARRAY_MAP KEY_VALUE_GENERIC)super.clone();
+		}
+		catch(CloneNotSupportedException cantHappen) {
+			throw new InternalError();
+		}
+		c.key = key.clone();
+		c.value = value.clone();
+		return c;
+	}
+	
+	private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
+		s.defaultWriteObject();
+		for( int i = 0; i < size; i++ ) {
+			s.WRITE_KEY( key[ i ] );
+			s.WRITE_VALUE( value[ i ] );
+		}
+	}
+
+	@SuppressWarnings("unchecked")
+	private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
+		s.defaultReadObject();
+		key = new KEY_TYPE[ size ];
+		value = new VALUE_TYPE[ size ];
+		for( int i = 0; i < size; i++ ) {
+			key[ i ] = s.READ_KEY();
+			value[ i ] = s.READ_VALUE();
+		}
+	}
+}
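+
+/* Usage sketch (assuming the generated int/int instance of this driver is named
+ * Int2IntArrayMap): the backing-array constructors documented above wrap existing
+ * parallel arrays without copying them.
+ *
+ *   int[] k = { 1, 2, 3 };
+ *   int[] v = { 10, 20, 30 };                  // same length as k; keys must be distinct
+ *   Int2IntArrayMap m = new Int2IntArrayMap( k, v );
+ *   int x = m.get( 2 );                        // linear scan, returns 20
+ *   m.put( 4, 40 );                            // reallocates once the backing arrays are full
+ */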
diff --git a/drv/ArrayPriorityQueue.drv b/drv/ArrayPriorityQueue.drv
new file mode 100644
index 0000000..7ee75a4
--- /dev/null
+++ b/drv/ArrayPriorityQueue.drv
@@ -0,0 +1,210 @@
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+#if #keyclass(Object)
+import java.util.Comparator;
+import it.unimi.dsi.fastutil.AbstractPriorityQueue;
+#endif
+
+import java.util.NoSuchElementException;
+
+/** A type-specific array-based priority queue.
+ *
+ * <P>Instances of this class represent a priority queue using a backing
+ * array—all operations are performed directly on the array. The array is
+ * enlarged as needed, but it is never shrunk. Use the {@link #trim()} method
+ * to reduce its size, if necessary.
+ *
+ * <P>This implementation is extremely inefficient, but it is difficult to beat
+ * when the size of the queue is very small.
+ */
+
+public class ARRAY_PRIORITY_QUEUE KEY_GENERIC extends ABSTRACT_PRIORITY_QUEUE KEY_GENERIC {
+
+	/** The backing array. */
+	@SuppressWarnings("unchecked")
+	protected KEY_GENERIC_TYPE array[] = KEY_GENERIC_ARRAY_CAST ARRAYS.EMPTY_ARRAY;
+
+	/** The number of elements in this queue. */
+	protected int size;
+	
+	/** The type-specific comparator used in this queue. */
+	protected KEY_COMPARATOR KEY_SUPER_GENERIC c;
+
+	/** The index of the first (smallest) element, cached if {@link #firstIndexValid} is true. */
+	protected int firstIndex;
+
+	/** Whether {@link #firstIndex} contains a valid value. */
+	protected boolean firstIndexValid;
+
+	/** Creates a new empty queue with a given capacity and comparator.
+	 *
+	 * @param capacity the initial capacity of this queue.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	@SuppressWarnings("unchecked")
+	public ARRAY_PRIORITY_QUEUE( int capacity, KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		if ( capacity > 0 ) this.array = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ capacity ];
+		this.c = c;
+	}
+
+	/** Creates a new empty queue with a given capacity and using the natural order.
+	 *
+	 * @param capacity the initial capacity of this queue.
+	 */
+	public ARRAY_PRIORITY_QUEUE( int capacity ) {
+		this( capacity, null );
+	}
+
+	/** Creates a new empty queue with a given comparator.
+	 *
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public ARRAY_PRIORITY_QUEUE( KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( 0, c );
+	}
+
+	/** Creates a new empty queue using the natural order. 
+	 */
+	public ARRAY_PRIORITY_QUEUE() {
+		this( 0, null );
+	}
+
+	/** Wraps a given array in a queue using a given comparator.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 *
+	 * @param a an array.
+	 * @param size the number of elements to be included in the queue.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public ARRAY_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] a, int size, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( c );
+		this.array = a;
+		this.size = size;
+	}
+
+
+	/** Wraps a given array in a queue using a given comparator.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 *
+	 * @param a an array.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public ARRAY_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] a, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( a, a.length, c );
+	}
+
+	/** Wraps a given array in a queue using the natural order.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 *
+	 * @param a an array.
+	 * @param size the number of elements to be included in the queue.
+	 */
+	public ARRAY_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] a, int size ) {
+		this( a, size, null );
+	}
+
+
+	/** Wraps a given array in a queue using the natural order.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 *
+	 * @param a an array.
+	 */
+	public ARRAY_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] a ) {
+		this( a, a.length );
+	}
+
+
+
+	/** Returns the index of the smallest element. */
+
+	@SuppressWarnings("unchecked")
+	private int findFirst() {
+		if ( firstIndexValid ) return this.firstIndex;
+		firstIndexValid = true;
+		int i = size;
+		int firstIndex = --i;
+		KEY_GENERIC_TYPE first = array[ firstIndex ];
+
+		if ( c == null ) { while( i-- != 0 ) if ( KEY_LESS( array[ i ], first ) ) first = array[ firstIndex = i ]; }
+		else while( i-- != 0 ) { if ( c.compare( array[ i ], first ) < 0 ) first = array[ firstIndex = i ]; }
+
+		return this.firstIndex = firstIndex;
+	}
+
+	private void ensureNonEmpty() {
+		if ( size == 0 ) throw new NoSuchElementException();
+	}
+
+	@SuppressWarnings("unchecked")
+	public void enqueue( KEY_GENERIC_TYPE x ) {
+		if ( size == array.length ) array = ARRAYS.grow( array, size + 1 );
+		if ( firstIndexValid ) {
+			if ( c == null ) { if ( KEY_LESS( x, array[ firstIndex ] ) ) firstIndex = size; }
+			else if ( c.compare( x, array[ firstIndex ] ) < 0 ) firstIndex = size;
+		}
+		else firstIndexValid = false;
+		array[ size++ ] = x;
+	}
+
+	public KEY_GENERIC_TYPE DEQUEUE() {
+		ensureNonEmpty();
+		final int first = findFirst();
+		final KEY_GENERIC_TYPE result = array[ first ];
+		System.arraycopy( array, first + 1, array, first, --size - first );
+#if #keyclass(Object)
+		array[ size ] = null;
+#endif
+		firstIndexValid = false;
+		return result;
+	}
+
+	public KEY_GENERIC_TYPE FIRST() {
+		ensureNonEmpty();
+		return array[ findFirst() ];
+	}
+
+	public void changed() {
+		ensureNonEmpty();
+		firstIndexValid = false;
+	}
+
+	public int size() { return size; }
+
+	public void clear() { 
+#if #keyclass(Object)
+		ObjectArrays.fill( array, 0, size, null );
+#endif
+		size = 0; 
+		firstIndexValid = false;
+	}
+
+	/** Trims the underlying array so that it has exactly {@link #size()} elements.
+	 */
+
+	public void trim() {
+		array = ARRAYS.trim( array, size );
+	}
+
+	public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return c; }
+}
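+
+/* Usage sketch (assuming the generated int instance of this driver is named
+ * IntArrayPriorityQueue): the wrapping constructors above turn an existing array
+ * into a queue without copying it.
+ *
+ *   int[] a = { 3, 1, 2 };
+ *   IntArrayPriorityQueue q = new IntArrayPriorityQueue( a );
+ *   int min = q.dequeueInt();  // 1, the smallest element in natural order
+ *   q.enqueue( 0 );
+ *   q.trim();                  // shrink the backing array to size()
+ */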
diff --git a/drv/ArraySet.drv b/drv/ArraySet.drv
new file mode 100644
index 0000000..1741be8
--- /dev/null
+++ b/drv/ArraySet.drv
@@ -0,0 +1,186 @@
+/*		 
+ * Copyright (C) 2007-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.Collection;
+
+/** A simple, brute-force implementation of a set based on a backing array.
+ *
+ * <p>The main purpose of this
+ * implementation is to wrap cleanly the brute-force approach to the storage of a very
+ * small number of items: just put them into an array and scan it linearly to find an item.
+ */
+
+public class ARRAY_SET KEY_GENERIC extends ABSTRACT_SET KEY_GENERIC implements java.io.Serializable, Cloneable {
+
+	private static final long serialVersionUID = 1L;
+	/** The backing array (valid up to {@link #size}, excluded). */
+	private transient KEY_TYPE[] a;
+	/** The number of valid entries in {@link #a}. */
+	private int size;
+	
+	/** Creates a new array set using the given backing array. The resulting set will have as many elements as the array.
+	 * 
+	 * <p>It is the responsibility of the caller that the elements of <code>a</code> are distinct.
+	 * 
+	 * @param a the backing array.
+	 */
+	public ARRAY_SET( final KEY_TYPE[] a ) {
+		this.a = a;
+		size = a.length;
+	}
+
+	/** Creates a new empty array set.
+	 */
+	public ARRAY_SET() {
+		this.a = ARRAYS.EMPTY_ARRAY;
+	}
+
+	/** Creates a new empty array set of given initial capacity.
+	 * 
+	 * @param capacity the initial capacity.
+	 */
+	public ARRAY_SET( final int capacity ) {
+		this.a = new KEY_TYPE[ capacity ];
+	}
+
+	/** Creates a new array set copying the contents of a given collection.
+	 * @param c a collection.
+	 */
+	public ARRAY_SET( COLLECTION KEY_GENERIC c ) {
+		this( c.size () );
+		addAll( c );
+	}
+
+	/** Creates a new array set copying the contents of a given collection.
+	 * @param c a collection.
+	 */
+	public ARRAY_SET( final Collection<? extends KEY_GENERIC_CLASS> c ) {
+		this( c.size() );
+		addAll( c );
+	}
+
+
+	/** Creates a new array set using the given backing array and the given number of elements of the array.
+	 *
+	 * <p>It is the responsibility of the caller that the first <code>size</code> elements of <code>a</code> are distinct.
+	 * 
+	 * @param a the backing array.
+	 * @param size the number of valid elements in <code>a</code>.
+	 */
+	public ARRAY_SET( final KEY_TYPE[] a, final int size ) {
+		this.a = a;
+		this.size = size;
+		if ( size > a.length ) throw new IllegalArgumentException( "The provided size (" + size + ") is larger than the array size (" + a.length + ")" );
+	}
+
+	private int findKey( final KEY_TYPE o ) {
+		for( int i = size; i-- != 0; ) if ( KEY_EQUALS( a[ i ], o ) ) return i;
+		return -1;
+	}
+
+	@Override
+	@SuppressWarnings("unchecked")
+	public KEY_ITERATOR KEY_GENERIC iterator() {
+		return ITERATORS.wrap( KEY_GENERIC_ARRAY_CAST a, 0, size );
+	}
+
+	@SuppressWarnings("unchecked")
+	public boolean contains( final KEY_TYPE k ) {
+		return findKey( k ) != -1;
+	}
+
+	public int size() {
+		return size;
+	}
+	
+	@Override
+	@SuppressWarnings("unchecked")
+	public boolean remove( final KEY_TYPE k ) {
+		final int pos = findKey( k );
+		if ( pos == -1 ) return false;
+		final int tail = size - pos - 1;
+		for( int i = 0; i < tail; i++ ) a[ pos + i ] = a[ pos + i + 1 ];
+		size--;
+#if #keys(reference)
+		a[ size ] = null;
+#endif
+		return true;
+	}
+
+	@Override
+	public boolean add( final KEY_GENERIC_TYPE k ) {
+		final int pos = findKey( k );
+		if ( pos != -1 ) return false;
+		if ( size == a.length ) {
+			final KEY_TYPE[] b = new KEY_TYPE[ size == 0 ? 2 : size * 2 ];
+			for( int i = size; i-- != 0; ) b[ i ] = a[ i ];
+			a = b;
+		}
+		a[ size++ ] = k;
+		return true;
+	}
+
+	@Override
+	public void clear() {
+#if #keys(reference)
+		for( int i = size; i-- != 0; ) a[ i ] = null;
+#endif
+		size = 0;
+	}
+
+	@Override
+	public boolean isEmpty() {
+		return size == 0;
+	}
+	
+	/** Returns a deep copy of this set. 
+	 *
+	 * <P>This method performs a deep copy of this array set; the data stored in the
+	 * set, however, is not cloned. Note that this makes a difference only for object keys.
+	 *
+	 *  @return a deep copy of this set.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public ARRAY_SET KEY_GENERIC clone() {
+		ARRAY_SET KEY_GENERIC c;
+		try {
+			c = (ARRAY_SET KEY_GENERIC)super.clone();
+		}
+		catch(CloneNotSupportedException cantHappen) {
+			throw new InternalError();
+		}
+		c.a = a.clone();
+		return c;
+	}
+	
+	private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
+		s.defaultWriteObject();
+		for( int i = 0; i < size; i++ ) s.WRITE_KEY( a[ i ] );
+	}
+
+
+	@SuppressWarnings("unchecked")
+	private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
+		s.defaultReadObject();
+		a = new KEY_TYPE[ size ];
+		for( int i = 0; i < size; i++ ) a[ i ] = s.READ_KEY();
+	}
+	
+}
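+
+/* Usage sketch (assuming the generated int instance of this driver is named
+ * IntArraySet): intended for a handful of elements only, as every operation scans
+ * the backing array linearly.
+ *
+ *   IntArraySet s = new IntArraySet( 4 );
+ *   s.add( 1 );                  // true
+ *   s.add( 1 );                  // false: already present
+ *   boolean b = s.contains( 1 ); // true
+ *   s.remove( 1 );               // true
+ */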
diff --git a/drv/Arrays.drv b/drv/Arrays.drv
new file mode 100644
index 0000000..1d14b5b
--- /dev/null
+++ b/drv/Arrays.drv
@@ -0,0 +1,1729 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ *
+ *
+ *
+ * For the sorting and binary search code:
+ *
+ * Copyright (C) 1999 CERN - European Organization for Nuclear Research.
+ *
+ *   Permission to use, copy, modify, distribute and sell this software and
+ *   its documentation for any purpose is hereby granted without fee,
+ *   provided that the above copyright notice appear in all copies and that
+ *   both that copyright notice and this permission notice appear in
+ *   supporting documentation. CERN makes no representations about the
+ *   suitability of this software for any purpose. It is provided "as is"
+ *   without expressed or implied warranty. 
+ */
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.Arrays;
+import it.unimi.dsi.fastutil.Hash;
+import java.util.Random;
+
+#if #keys(primitive)
+
+/** A class providing static methods and objects that do useful things with type-specific arrays.
+ *
+ * <p>In particular, the <code>ensureCapacity()</code>, <code>grow()</code>,
+ * <code>trim()</code> and <code>setLength()</code> methods make it possible to handle
+ * arrays much like array lists. This can be very useful when reasons of efficiency (or
+ * syntactic simplicity) make array lists unsuitable.
+ *
+ * <P>Note that {@link it.unimi.dsi.fastutil.io.BinIO} and {@link it.unimi.dsi.fastutil.io.TextIO}
+ * contain several methods that make it possible to load and save arrays of primitive types as sequences
+ * of elements in {@link java.io.DataInput} format (i.e., not as objects) or as sequences of lines of text.
+ *
+ * @see java.util.Arrays
+ */
+
+public class ARRAYS {
+
+#else
+
+import java.util.Comparator;
+
+/** A class providing static methods and objects that do useful things with type-specific arrays.
+ *
+ * In particular, the <code>ensureCapacity()</code>, <code>grow()</code>,
+ * <code>trim()</code> and <code>setLength()</code> methods make it possible to handle
+ * arrays much like array lists. This can be very useful when reasons of efficiency (or
+ * syntactic simplicity) make array lists unsuitable.
+ *
+ * <P><strong>Warning:</strong> creating arrays 
+ * using {@linkplain java.lang.reflect.Array#newInstance(Class,int) reflection}, as it
+ * happens in {@link #ensureCapacity(Object[],int,int)} and {@link #grow(Object[],int,int)},
+ * is <em>significantly slower</em> than using <code>new</code>. This phenomenon is particularly
+ * evident in the first growth phases of an array reallocated with doubling (or similar) logic.
+ *
+ * @see java.util.Arrays
+ */
+
+public class ARRAYS {
+
+#endif
+	private ARRAYS() {}
+
+	/** A static, final, empty array. */
+	public final static KEY_TYPE[] EMPTY_ARRAY = {};
+
+
+#if #keyclass(Object)
+	/** Creates a new array using the given one as a prototype.
+	 *
+	 * <P>This method returns a new array of the given length whose elements
+	 * are of the same class as those of <code>prototype</code>. In case
+	 * of an empty array, it tries to return {@link #EMPTY_ARRAY}, if possible.
+	 *
+	 * @param prototype an array that will be used to type the new one.
+	 * @param length the length of the new array.
+	 * @return a new array of given type and length.
+	 */
+
+	@SuppressWarnings("unchecked")
+	private static <K> K[] newArray( final K[] prototype, final int length ) {
+		final Class<?> componentType = prototype.getClass().getComponentType();
+		if ( length == 0 && componentType == Object.class ) return (K[])EMPTY_ARRAY;
+		return (K[])java.lang.reflect.Array.newInstance( prototype.getClass().getComponentType(), length );
+	}
+#endif
+
+	/** Ensures that an array can contain the given number of entries.
+	 *
+	 * <P>If you cannot foresee whether this array will need again to be
+	 * enlarged, you should probably use <code>grow()</code> instead.
+	 *
+	 * @param array an array.
+	 * @param length the new minimum length for this array.
+	 * @return <code>array</code>, if it contains <code>length</code> entries or more; otherwise,
+	 * an array with <code>length</code> entries whose first <code>array.length</code>
+	 * entries are the same as those of <code>array</code>.
+	 */
+	public static KEY_GENERIC KEY_GENERIC_TYPE[] ensureCapacity( final KEY_GENERIC_TYPE[] array, final int length ) {
+		if ( length > array.length ) {
+			final KEY_GENERIC_TYPE t[] =
+#if #keyclass(Object)
+				newArray( array, length );
+#else
+				new KEY_TYPE[ length ];
+#endif
+			System.arraycopy( array, 0, t, 0, array.length );
+			return t;
+		}
+		return array;
+	}
+
+	/** Ensures that an array can contain the given number of entries, preserving just a part of the array.
+	 *
+	 * @param array an array.
+	 * @param length the new minimum length for this array.
+	 * @param preserve the number of elements of the array that must be preserved in case a new allocation is necessary.
+	 * @return <code>array</code>, if it can contain <code>length</code> entries or more; otherwise,
+	 * an array with <code>length</code> entries whose first <code>preserve</code>
+	 * entries are the same as those of <code>array</code>.
+	 */
+	public static KEY_GENERIC KEY_GENERIC_TYPE[] ensureCapacity( final KEY_GENERIC_TYPE[] array, final int length, final int preserve ) {
+		if ( length > array.length ) {
+			final KEY_GENERIC_TYPE t[] =
+#if #keyclass(Object)
+				newArray( array, length );
+#else
+				new KEY_TYPE[ length ];
+#endif
+			System.arraycopy( array, 0, t, 0, preserve );
+			return t;
+		}
+		return array;
+	}
+
+	/** Grows the given array to the maximum between the given length and
+	 * the current length multiplied by two, provided that the given
+	 * length is larger than the current length.
+	 *
+	 * <P>If you want complete control on the array growth, you
+	 * should probably use <code>ensureCapacity()</code> instead.
+	 *
+	 * @param array an array.
+	 * @param length the new minimum length for this array.
+	 * @return <code>array</code>, if it can contain <code>length</code>
+	 * entries; otherwise, an array with
+	 * max(<code>length</code>, <code>array.length</code> * 2) entries whose first
+	 * <code>array.length</code> entries are the same as those of <code>array</code>.
+	 * */
+
+	public static KEY_GENERIC KEY_GENERIC_TYPE[] grow( final KEY_GENERIC_TYPE[] array, final int length ) {
+		if ( length > array.length ) {
+			final int newLength = (int)Math.min( Math.max( 2L * array.length, length ), Arrays.MAX_ARRAY_SIZE );
+			final KEY_GENERIC_TYPE t[] =
+#if #keyclass(Object)
+				newArray( array, newLength );
+#else
+				new KEY_TYPE[ newLength ];
+#endif
+			System.arraycopy( array, 0, t, 0, array.length );
+			return t;
+		}
+		return array;
+	}
+
+	/** Grows the given array to the maximum between the given length and
+	 * the current length multiplied by two, provided that the given
+	 * length is larger than the current length, preserving just a part of the array.
+	 *
+	 * <P>If you want complete control on the array growth, you
+	 * should probably use <code>ensureCapacity()</code> instead.
+	 *
+	 * @param array an array.
+	 * @param length the new minimum length for this array.
+	 * @param preserve the number of elements of the array that must be preserved in case a new allocation is necessary.
+	 * @return <code>array</code>, if it can contain <code>length</code>
+	 * entries; otherwise, an array with
+	 * max(<code>length</code>, <code>array.length</code> * 2) entries whose first
+	 * <code>preserve</code> entries are the same as those of <code>array</code>.
+	 * */
+
+	public static KEY_GENERIC KEY_GENERIC_TYPE[] grow( final KEY_GENERIC_TYPE[] array, final int length, final int preserve ) {
+
+		if ( length > array.length ) {
+			final int newLength = (int)Math.min( Math.max( 2L * array.length, length ), Arrays.MAX_ARRAY_SIZE );
+
+			final KEY_GENERIC_TYPE t[] =
+#if #keyclass(Object)
+				newArray( array, newLength );
+#else
+				new KEY_TYPE[ newLength ];
+#endif
+			System.arraycopy( array, 0, t, 0, preserve );
+
+			return t;
+		}
+		return array;
+
+	}
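+
+	/* Usage sketch (assuming the generated int instance of this driver is named
+	 * IntArrays): together with a separate size counter, grow() and trim() let a
+	 * plain array be handled much like an array list.
+	 *
+	 *   int[] a = new int[ 4 ];
+	 *   int size = 0;
+	 *   for( int i = 0; i < 100; i++ ) {
+	 *      if ( size == a.length ) a = IntArrays.grow( a, size + 1 ); // at least doubles
+	 *      a[ size++ ] = i;
+	 *   }
+	 *   a = IntArrays.trim( a, size ); // drop the unused slack
+	 */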
+
+	/** Trims the given array to the given length.
+	 *
+	 * @param array an array.
+	 * @param length the new maximum length for the array.
+	 * @return <code>array</code>, if it contains <code>length</code>
+	 * entries or less; otherwise, an array with
+	 * <code>length</code> entries whose entries are the same as
+	 * the first <code>length</code> entries of <code>array</code>.
+	 * 
+	 */
+
+	public static KEY_GENERIC KEY_GENERIC_TYPE[] trim( final KEY_GENERIC_TYPE[] array, final int length ) {
+		if ( length >= array.length ) return array;
+		final KEY_GENERIC_TYPE t[] =
+#if #keyclass(Object)
+			newArray( array, length );
+#else
+			length == 0 ? EMPTY_ARRAY : new KEY_TYPE[ length ];
+#endif
+		System.arraycopy( array, 0, t, 0, length );
+		return t;
+	}
+
+	/** Sets the length of the given array.
+	 *
+	 * @param array an array.
+	 * @param length the new length for the array.
+	 * @return <code>array</code>, if it contains exactly <code>length</code>
+	 * entries; otherwise, if it contains <em>more</em> than
+	 * <code>length</code> entries, an array with <code>length</code> entries
+	 * whose entries are the same as the first <code>length</code> entries of
+	 * <code>array</code>; otherwise, an array with <code>length</code> entries
+	 * whose first <code>array.length</code> entries are the same as those of
+	 * <code>array</code>.
+	 * 
+	 */
+
+	public static KEY_GENERIC KEY_GENERIC_TYPE[] setLength( final KEY_GENERIC_TYPE[] array, final int length ) {
+		if ( length == array.length ) return array;
+		if ( length < array.length ) return trim( array, length );
+		return ensureCapacity( array, length );
+	}
+
+	/** Returns a copy of a portion of an array.
+	 *
+	 * @param array an array.
+	 * @param offset the first element to copy.
+	 * @param length the number of elements to copy.
+	 * @return a new array containing <code>length</code> elements of <code>array</code> starting at <code>offset</code>.
+	 */
+
+	public static KEY_GENERIC KEY_GENERIC_TYPE[] copy( final KEY_GENERIC_TYPE[] array, final int offset, final int length ) {
+		ensureOffsetLength( array, offset, length );
+		final KEY_GENERIC_TYPE[] a = 
+#if #keyclass(Object)
+			newArray( array, length );
+#else
+			length == 0 ? EMPTY_ARRAY : new KEY_TYPE[ length ];
+#endif
+		System.arraycopy( array, offset, a, 0, length );
+		return a;
+	}
+
+	/** Returns a copy of an array.
+	 *
+	 * @param array an array.
+	 * @return a copy of <code>array</code>.
+	 */
+
+	public static KEY_GENERIC KEY_GENERIC_TYPE[] copy( final KEY_GENERIC_TYPE[] array ) {
+		return array.clone();
+	}
+
+	/** Fills the given array with the given value.
+	 *
+	 * <P>This method uses a backward loop. It is significantly faster than the corresponding
+	 * method in {@link java.util.Arrays}.
+	 *
+	 * @param array an array.
+	 * @param value the new value for all elements of the array.
+	 */
+
+	public static KEY_GENERIC void fill( final KEY_GENERIC_TYPE[] array, final KEY_GENERIC_TYPE value ) {
+		int i = array.length;
+		while( i-- != 0 ) array[ i ] = value;
+	}
+
+	/** Fills a portion of the given array with the given value.
+	 *
+	 * <P>If possible (i.e., <code>from</code> is 0) this method uses a
+	 * backward loop. In this case, it is significantly faster than the
+	 * corresponding method in {@link java.util.Arrays}.
+	 *
+	 * @param array an array.
+	 * @param from the starting index of the portion to fill (inclusive).
+	 * @param to the end index of the portion to fill (exclusive).
+	 * @param value the new value for all elements of the specified portion of the array.
+	 */
+
+	public static KEY_GENERIC void fill( final KEY_GENERIC_TYPE[] array, final int from, int to, final KEY_GENERIC_TYPE value ) {
+		ensureFromTo( array, from, to );
+		if ( from == 0 ) while( to-- != 0 ) array[ to ] = value;
+		else for( int i = from; i < to; i++ ) array[ i ] = value;
+	}
+
+
+
+	/** Returns true if the two arrays are elementwise equal.
+	 *
+	 * <P>This method uses a backward loop. It is significantly faster than the corresponding
+	 * method in {@link java.util.Arrays}.
+	 *
+	 * @param a1 an array.
+	 * @param a2 another array.
+	 * @return true if the two arrays are of the same length, and their elements are equal.
+	 */
+
+	public static KEY_GENERIC boolean equals( final KEY_GENERIC_TYPE[] a1, final KEY_GENERIC_TYPE a2[] ) {
+		int i = a1.length;
+		if ( i != a2.length ) return false;
+		while( i-- != 0 ) if (! KEY_EQUALS( a1[ i ], a2[ i ] ) ) return false;
+		return true;
+	}
+
+
+
+
+	/** Ensures that a range given by its first (inclusive) and last (exclusive) elements fits an array.
+	 *
+	 * <P>This method may be used whenever an array range check is needed.
+	 *
+	 * @param a an array.
+	 * @param from a start index (inclusive).
+	 * @param to an end index (exclusive).
+	 * @throws IllegalArgumentException if <code>from</code> is greater than <code>to</code>.
+	 * @throws ArrayIndexOutOfBoundsException if <code>from</code> or <code>to</code> are greater than the array length or negative.
+	 */
+	public static KEY_GENERIC void ensureFromTo( final KEY_GENERIC_TYPE[] a, final int from, final int to ) {
+		Arrays.ensureFromTo( a.length, from, to );
+	}
+
+	/** Ensures that a range given by an offset and a length fits an array.
+	 *
+	 * <P>This method may be used whenever an array range check is needed.
+	 *
+	 * @param a an array.
+	 * @param offset a start index.
+	 * @param length a length (the number of elements in the range).
+	 * @throws IllegalArgumentException if <code>length</code> is negative.
+	 * @throws ArrayIndexOutOfBoundsException if <code>offset</code> is negative or <code>offset</code>+<code>length</code> is greater than the array length.
+	 */
+	public static KEY_GENERIC void ensureOffsetLength( final KEY_GENERIC_TYPE[] a, final int offset, final int length ) {
+		Arrays.ensureOffsetLength( a.length, offset, length );
+	}
+
+	private static final int SMALL = 7;
+	private static final int MEDIUM = 50;
+
+	private static KEY_GENERIC void swap( final KEY_GENERIC_TYPE x[], final int a, final int b ) {
+		final KEY_GENERIC_TYPE t = x[ a ];
+		x[ a ] = x[ b ];
+		x[ b ] = t;
+	}
+
+	private static KEY_GENERIC void vecSwap( final KEY_GENERIC_TYPE[] x, int a, int b, final int n ) {
+		for( int i = 0; i < n; i++, a++, b++ ) swap( x, a, b );
+	}
+ 
+	private static KEY_GENERIC int med3( final KEY_GENERIC_TYPE x[], final int a, final int b, final int c, KEY_COMPARATOR KEY_GENERIC comp ) {
+		int ab = comp.compare( x[ a ], x[ b ] );
+		int ac = comp.compare( x[ a ], x[ c ] );
+		int bc = comp.compare( x[ b ], x[ c ] );
+		return ( ab < 0 ?
+			( bc < 0 ? b : ac < 0 ? c : a ) :
+			( bc > 0 ? b : ac > 0 ? c : a ) );
+	}
+
+
+	private static KEY_GENERIC void selectionSort( final KEY_GENERIC_TYPE[] a, final int from, final int to, final KEY_COMPARATOR KEY_GENERIC comp ) {
+		for( int i = from; i < to - 1; i++ ) {
+			int m = i;
+			for( int j = i + 1; j < to; j++ ) if ( comp.compare( a[ j ], a[ m ] ) < 0 ) m = j;
+			if ( m != i ) {
+				final KEY_GENERIC_TYPE u = a[ i ];
+				a[ i ] = a[ m ];
+				a[ m ] = u;
+			}
+		}
+	}
+
+
+	private static KEY_GENERIC void insertionSort( final KEY_GENERIC_TYPE[] a, final int from, final int to, final KEY_COMPARATOR KEY_GENERIC comp  ) {
+		for ( int i = from; ++i < to; ) { 
+			KEY_GENERIC_TYPE t = a[ i ];
+			int j = i;
+			for ( KEY_GENERIC_TYPE u = a[ j - 1 ]; comp.compare( t, u ) < 0; u = a[ --j - 1 ] ) {
+				a[ j ] = u;
+				if ( from == j - 1 ) {
+					--j;
+					break;
+				}
+			}
+			a[ j ] = t;
+		}
+	}
+
+	@SuppressWarnings("unchecked")
+	private static KEY_GENERIC void selectionSort( final KEY_GENERIC_TYPE[] a, final int from, final int to ) {
+		for( int i = from; i < to - 1; i++ ) {
+			int m = i;
+			for( int j = i + 1; j < to; j++ ) if ( KEY_LESS( a[ j ], a[ m ] ) ) m = j;
+			if ( m != i ) {
+				final KEY_GENERIC_TYPE u = a[ i ];
+				a[ i ] = a[ m ];
+				a[ m ] = u;
+			}
+		}
+	}
+	
+	@SuppressWarnings("unchecked")
+	private static KEY_GENERIC void insertionSort( final KEY_GENERIC_TYPE[] a, final int from, final int to ) {
+		for ( int i = from; ++i < to; ) { 
+			KEY_GENERIC_TYPE t = a[ i ];
+			int j = i;
+			for ( KEY_GENERIC_TYPE u = a[ j - 1 ]; KEY_LESS( t, u ); u = a[ --j - 1 ] ) {
+				a[ j ] = u;
+				if ( from == j - 1 ) {
+					--j;
+					break;
+				}
+			}
+			a[ j ] = t;
+		}
+	}
+
+
+	/** Sorts the specified range of elements according to the order induced by the specified
+	 * comparator using quicksort. 
+	 * 
+	 * <p>The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas
+	 * McIlroy, “Engineering a Sort Function”, <i>Software: Practice and Experience</i>, 23(11), pages
+	 * 1249−1265, 1993.
+	 * 
+	 * @param x the array to be sorted.
+	 * @param from the index of the first element (inclusive) to be sorted.
+	 * @param to the index of the last element (exclusive) to be sorted.
+	 * @param comp the comparator to determine the sorting order.
+	 * 
+	 */
+	public static KEY_GENERIC void quickSort( final KEY_GENERIC_TYPE[] x, final int from, final int to, final KEY_COMPARATOR KEY_GENERIC comp ) {
+		final int len = to - from;
+		
+		// Selection sort on smallest arrays
+		if ( len < SMALL ) {
+			selectionSort( x, from, to, comp );
+			return;
+		}
+
+		// Choose a partition element, v
+		int m = from + len / 2;	 // Small arrays, middle element
+		if ( len > SMALL ) {
+			int l = from;
+			int n = to - 1;
+			if ( len > MEDIUM ) {		// Big arrays, pseudomedian of 9
+				int s = len / 8;
+				l = med3( x, l, l + s, l + 2 * s, comp );
+				m = med3( x, m - s, m, m + s, comp );
+				n = med3( x, n - 2 * s, n - s, n, comp );
+			}
+			m = med3( x, l, m, n, comp ); // Mid-size, med of 3
+		}
+		
+		final KEY_GENERIC_TYPE v = x[ m ];
+
+		// Establish Invariant: v* (<v)* (>v)* v*
+		int a = from, b = a, c = to - 1, d = c;
+		while(true) {
+			int comparison;
+			while ( b <= c && ( comparison = comp.compare( x[ b ], v ) ) <= 0 ) {
+				if ( comparison == 0 ) swap( x, a++, b );
+				b++;
+			}
+			while (c >= b && ( comparison = comp.compare( x[ c ], v ) ) >=0 ) {
+				if ( comparison == 0 ) swap( x, c, d-- );
+				c--;
+			}
+			if ( b > c ) break;
+			swap( x, b++, c-- );
+		}
+
+		// Swap partition elements back to middle
+		int s, n = to;
+		s = Math.min( a - from, b - a );
+		vecSwap( x, from, b - s, s );
+		s = Math.min( d - c, n - d - 1 );
+		vecSwap( x, b, n - s, s );
+
+		// Recursively sort non-partition-elements
+		if ( ( s = b - a ) > 1 ) quickSort( x, from, from + s, comp );
+		if ( ( s = d - c ) > 1 ) quickSort( x, n - s, n, comp );
+
+	}
+
+	/** Sorts an array according to the order induced by the specified
+	 * comparator using quicksort. 
+	 * 
+	 * <p>The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas
+	 * McIlroy, “Engineering a Sort Function”, <i>Software: Practice and Experience</i>, 23(11), pages
+	 * 1249−1265, 1993.
+	 * 
+	 * @param x the array to be sorted.
+	 * @param comp the comparator to determine the sorting order.
+	 * 
+	 */
+	public static KEY_GENERIC void quickSort( final KEY_GENERIC_TYPE[] x, final KEY_COMPARATOR KEY_GENERIC comp ) {
+		quickSort( x, 0, x.length, comp );	
+	}
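+
+	/* Usage sketch for the object-based instance of this driver (where the comparator
+	 * type is java.util.Comparator), assuming it is generated as ObjectArrays:
+	 *
+	 *   String[] s = { "b", "A", "c" };
+	 *   ObjectArrays.quickSort( s, String.CASE_INSENSITIVE_ORDER ); // { "A", "b", "c" }
+	 */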
+	
+
+	@SuppressWarnings("unchecked")
+	private static KEY_GENERIC int med3( final KEY_GENERIC_TYPE x[], final int a, final int b, final int c ) {
+		int ab = KEY_CMP( x[ a ], x[ b ] );
+		int ac = KEY_CMP( x[ a ], x[ c ] );
+		int bc = KEY_CMP( x[ b ], x[ c ] );
+		return ( ab < 0 ?
+			( bc < 0 ? b : ac < 0 ? c : a ) :
+			( bc > 0 ? b : ac > 0 ? c : a ) );
+	}
+
+
+	/** Sorts the specified range of elements according to the natural ascending order using quicksort.
+	 * 
+	 * <p>The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas
+	 * McIlroy, “Engineering a Sort Function”, <i>Software: Practice and Experience</i>, 23(11), pages
+	 * 1249−1265, 1993.
+	 * 
+	 * @param x the array to be sorted.
+	 * @param from the index of the first element (inclusive) to be sorted.
+	 * @param to the index of the last element (exclusive) to be sorted.
+	 * @deprecated Use the corresponding {@code sort()} method in {@link java.util.Arrays}.
+	 */
+
+	@SuppressWarnings("unchecked")
+	@Deprecated
+	public static KEY_GENERIC void quickSort( final KEY_GENERIC_TYPE[] x, final int from, final int to ) {
+		final int len = to - from;
+
+		// Selection sort on smallest arrays
+		if ( len < SMALL ) {
+			selectionSort( x, from, to );
+			return;
+		}
+
+		// Choose a partition element, v
+		int m = from + len / 2;	 // Small arrays, middle element
+		if ( len > SMALL ) {
+			int l = from;
+			int n = to - 1;
+			if ( len > MEDIUM ) {		// Big arrays, pseudomedian of 9
+				int s = len / 8;
+				l = med3( x, l, l + s, l + 2 * s );
+				m = med3( x, m - s, m, m + s );
+				n = med3( x, n - 2 * s, n - s, n );
+			}
+			m = med3( x, l, m, n ); // Mid-size, med of 3
+		}
+		
+		final KEY_GENERIC_TYPE v = x[ m ];
+
+		// Establish Invariant: v* (<v)* (>v)* v*
+		int a = from, b = a, c = to - 1, d = c;
+		while(true) {
+			int comparison;
+			while ( b <= c && ( comparison = KEY_CMP( x[ b ], v ) ) <= 0 ) {
+				if ( comparison == 0 ) swap( x, a++, b );
+				b++;
+			}
+			while (c >= b && ( comparison = KEY_CMP( x[ c ], v ) ) >=0 ) {
+				if ( comparison == 0 ) swap( x, c, d-- );
+				c--;
+			}
+			if ( b > c ) break;
+			swap( x, b++, c-- );
+		}
+
+		// Swap partition elements back to middle
+		int s, n = to;
+		s = Math.min( a - from, b - a );
+		vecSwap( x, from, b - s, s );
+		s = Math.min( d - c, n - d - 1 );
+		vecSwap( x, b, n - s, s );
+
+		// Recursively sort non-partition-elements
+		if ( ( s = b - a ) > 1 ) quickSort( x, from, from + s );
+		if ( ( s = d - c ) > 1 ) quickSort( x, n - s, n );
+
+	}
+
+	/** Sorts an array according to the natural ascending order using quicksort.
+	 * 
+	 * <p>The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas
+	 * McIlroy, “Engineering a Sort Function”, <i>Software: Practice and Experience</i>, 23(11), pages
+	 * 1249−1265, 1993.
+	 * 
+	 * @param x the array to be sorted.
+	 * 
+	 * @deprecated Use the corresponding {@code sort()} method in {@link java.util.Arrays}.
+	 */
+	@Deprecated
+	public static KEY_GENERIC void quickSort( final KEY_GENERIC_TYPE[] x ) {
+		quickSort( x, 0, x.length );	
+	}
+	
+	/** Sorts the specified range of elements according to the natural ascending order using mergesort, using a given support array.
+	 * 
+	 * <p>This sort is guaranteed to be <i>stable</i>: equal elements will not be reordered as a result
+	 * of the sort. Moreover, no support arrays will be allocated. 
+	 *
+	 * @param a the array to be sorted.
+	 * @param from the index of the first element (inclusive) to be sorted.
+	 * @param to the index of the last element (exclusive) to be sorted.
+	 * @param supp a support array containing at least <code>to</code> elements.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public static KEY_GENERIC void mergeSort( final KEY_GENERIC_TYPE a[], final int from, final int to, final KEY_GENERIC_TYPE supp[] ) {
+		int len = to - from;
+		
+		// Insertion sort on smallest arrays
+		if ( len < SMALL ) {
+			insertionSort( a, from, to );
+			return;
+		}
+	
+		// Recursively sort halves of a into supp
+		final int mid = ( from + to ) >>> 1;
+		mergeSort( supp, from, mid, a );
+		mergeSort( supp, mid, to, a );
+	
+		// If list is already sorted, just copy from supp to a.  This is an
+		// optimization that results in faster sorts for nearly ordered lists.
+		if ( KEY_LESSEQ( supp[ mid - 1 ], supp[ mid ] ) ) {
+			System.arraycopy( supp, from, a, from, len );
+			return;
+		}
+	
+		// Merge sorted halves (now in supp) into a
+		for( int i = from, p = from, q = mid; i < to; i++ ) {
+			if ( q >= to || p < mid && KEY_LESSEQ( supp[ p ], supp[ q ] ) ) a[ i ] = supp[ p++ ];
+			else a[ i ] = supp[ q++ ];
+		}
+	}
+
+	/** Sorts the specified range of elements according to the natural ascending order using mergesort.
+	 * 
+	 * <p>This sort is guaranteed to be <i>stable</i>: equal elements will not be reordered as a result
+	 * of the sort. An array as large as <code>a</code> will be allocated by this method.
+	 *
+	 * @param a the array to be sorted.
+	 * @param from the index of the first element (inclusive) to be sorted.
+	 * @param to the index of the last element (exclusive) to be sorted.
+	 */
+	public static KEY_GENERIC void mergeSort( final KEY_GENERIC_TYPE a[], final int from, final int to ) {
+		mergeSort( a, from, to, a.clone() );
+	}
+
+	/**	Sorts an array according to the natural ascending order using mergesort.
+	 * 
+	 * <p>This sort is guaranteed to be <i>stable</i>: equal elements will not be reordered as a result
+	 * of the sort. An array as large as <code>a</code> will be allocated by this method.
+	 *
+	 * @param a the array to be sorted.
+	 */
+	public static KEY_GENERIC void mergeSort( final KEY_GENERIC_TYPE a[] ) {
+		mergeSort( a, 0, a.length );
+	}
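+
+	/* Usage sketch (assuming the generated int instance of this driver is named
+	 * IntArrays): when many arrays must be sorted stably, a support array can be
+	 * supplied explicitly to avoid the allocation performed by the overloads above.
+	 *
+	 *   int[] a = { 3, 1, 2 };
+	 *   int[] supp = a.clone();                      // supp must start as a copy of a
+	 *   IntArrays.mergeSort( a, 0, a.length, supp ); // stable; no further allocation
+	 */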
+
+	/** Sorts the specified range of elements according to the order induced by the specified
+	 * comparator using mergesort, using a given support array.
+	 * 
+	 * <p>This sort is guaranteed to be <i>stable</i>: equal elements will not be reordered as a result
+	 * of the sort. Moreover, no support arrays will be allocated.
+	 *
+	 * @param a the array to be sorted.
+	 * @param from the index of the first element (inclusive) to be sorted.
+	 * @param to the index of the last element (exclusive) to be sorted.
+	 * @param comp the comparator to determine the sorting order.
+	 * @param supp a support array containing at least <code>to</code> elements.
+	 */
+	@SuppressWarnings("unchecked")
+	public static KEY_GENERIC void mergeSort( final KEY_GENERIC_TYPE a[], final int from, final int to, KEY_COMPARATOR KEY_GENERIC comp, final KEY_GENERIC_TYPE supp[] ) {
+		int len = to - from;
+		
+		// Insertion sort on smallest arrays
+		if ( len < SMALL ) {
+			insertionSort( a, from, to, comp );
+			return;
+		}
+	
+		// Recursively sort halves of a into supp
+		final int mid = ( from + to ) >>> 1;
+		mergeSort( supp, from, mid, comp, a );
+		mergeSort( supp, mid, to, comp, a );
+	
+		// If list is already sorted, just copy from supp to a.  This is an
+		// optimization that results in faster sorts for nearly ordered lists.
+		if ( comp.compare( supp[ mid - 1 ], supp[ mid ] ) <= 0 ) {
+			System.arraycopy( supp, from, a, from, len );
+			return;
+		}
+	
+		// Merge sorted halves (now in supp) into a
+		for( int i = from, p = from, q = mid; i < to; i++ ) {
+			if ( q >= to || p < mid && comp.compare( supp[ p ], supp[ q ] ) <= 0 ) a[ i ] = supp[ p++ ];
+			else a[ i ] = supp[ q++ ];
+		}
+	}
+
+	/** Sorts the specified range of elements according to the order induced by the specified
+	 * comparator using mergesort.
+	 * 
+	 * <p>This sort is guaranteed to be <i>stable</i>: equal elements will not be reordered as a result
+	 * of the sort. An array as large as <code>a</code> will be allocated by this method.
+	 *
+	 * @param a the array to be sorted.
+	 * @param from the index of the first element (inclusive) to be sorted.
+	 * @param to the index of the last element (exclusive) to be sorted.
+	 * @param comp the comparator to determine the sorting order.
+	 */
+	public static KEY_GENERIC void mergeSort( final KEY_GENERIC_TYPE a[], final int from, final int to, KEY_COMPARATOR KEY_GENERIC comp ) {
+		mergeSort( a, from, to, comp, a.clone() );
+	}
+
+	/** Sorts an array according to the order induced by the specified
+	 * comparator using mergesort.
+	 * 
+	 * <p>This sort is guaranteed to be <i>stable</i>: equal elements will not be reordered as a result
+	 * of the sort.  An array as large as <code>a</code> will be allocated by this method.
+	 *
+	 * @param a the array to be sorted.
+	 * @param comp the comparator to determine the sorting order.
+	 */
+	public static KEY_GENERIC void mergeSort( final KEY_GENERIC_TYPE a[], KEY_COMPARATOR KEY_GENERIC comp ) {
+		mergeSort( a, 0, a.length, comp );
+	}
+
+#if ! #keyclass(Boolean)
+
+	/**
+	 * Searches a range of the specified array for the specified value using 
+	 * the binary search algorithm. The range must be sorted prior to making this call. 
+	 * If it is not sorted, the results are undefined. If the range contains multiple elements with 
+	 * the specified value, there is no guarantee which one will be found.
+	 *
+	 * @param a the array to be searched.
+	 * @param from  the index of the first element (inclusive) to be searched.
+	 * @param to  the index of the last element (exclusive) to be searched.
+	 * @param key the value to be searched for.
+	 * @return index of the search key, if it is contained in the array;
+	 *             otherwise, <samp>(-(<i>insertion point</i>) - 1)</samp>.  The <i>insertion
+	 *             point</i> is defined as the point at which the value would
+	 *             be inserted into the array: the index of the first
+	 *             element greater than the key, or the length of the array, if all
+	 *             elements in the array are less than the specified key.  Note
+	 *             that this guarantees that the return value will be >= 0 if
+	 *             and only if the key is found.
+	 * @see java.util.Arrays
+	 */
+	@SuppressWarnings({"unchecked","rawtypes"})
+	public static KEY_GENERIC int binarySearch( final KEY_GENERIC_TYPE[] a, int from, int to, final KEY_GENERIC_TYPE key ) {
+		KEY_GENERIC_TYPE midVal;
+		to--;
+		while (from <= to) {
+			final int mid = (from + to) >>> 1;
+			midVal = a[ mid ];
+#if #keys(primitive)
+			if (midVal < key) from = mid + 1;
+			else if (midVal > key) to = mid - 1;
+			else return mid;
+#else
+			final int cmp = ((Comparable)midVal).compareTo( key );
+			if ( cmp < 0 ) from = mid + 1;
+			else if (cmp > 0) to = mid - 1;
+			else return mid;
+#endif
+        }
+		return -( from + 1 );
+	}
+
+	/**
+	 * Searches an array for the specified value using 
+	 * the binary search algorithm. The array must be sorted prior to making this call. 
+	 * If it is not sorted, the results are undefined. If the array contains multiple elements with 
+	 * the specified value, there is no guarantee which one will be found.
+	 *
+	 * @param a the array to be searched.
+	 * @param key the value to be searched for.
+	 * @return index of the search key, if it is contained in the array;
+	 *             otherwise, <samp>(-(<i>insertion point</i>) - 1)</samp>.  The <i>insertion
+	 *             point</i> is defined as the the point at which the value would
+	 *             point</i> is defined as the point at which the value would
+	 *             element greater than the key, or the length of the array, if all
+	 *             elements in the array are less than the specified key.  Note
+	 *             that this guarantees that the return value will be >= 0 if
+	 *             and only if the key is found.
+	 * @see java.util.Arrays
+	 */
+	public static KEY_GENERIC int binarySearch( final KEY_GENERIC_TYPE[] a, final KEY_GENERIC_TYPE key ) {
+		return binarySearch( a, 0, a.length, key );
+	}
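+
+	/* Usage sketch (assuming the generated int instance of this driver is named
+	 * IntArrays): a negative return value encodes the insertion point described
+	 * above, and can be decoded as -(result) - 1.
+	 *
+	 *   int[] a = { 10, 20, 40 };                  // must be sorted
+	 *   int pos = IntArrays.binarySearch( a, 30 ); // returns -3
+	 *   if ( pos < 0 ) pos = -pos - 1;             // insertion point: 2
+	 */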
+
+	/**
+	 * Searches a range of the specified array for the specified value using 
+	 * the binary search algorithm and a specified comparator. The range must be sorted following the comparator prior to making this call. 
+	 * If it is not sorted, the results are undefined. If the range contains multiple elements with 
+	 * the specified value, there is no guarantee which one will be found.
+	 *
+	 * @param a the array to be searched.
+	 * @param from  the index of the first element (inclusive) to be searched.
+	 * @param to  the index of the last element (exclusive) to be searched.
+	 * @param key the value to be searched for.
+	 * @param c a comparator.
+	 * @return index of the search key, if it is contained in the array;
+	 *             otherwise, <samp>(-(<i>insertion point</i>) - 1)</samp>.  The <i>insertion
+	 *             point</i> is defined as the point at which the value would
+	 *             be inserted into the array: the index of the first
+	 *             element greater than the key, or the length of the array, if all
+	 *             elements in the array are less than the specified key.  Note
+	 *             that this guarantees that the return value will be >= 0 if
+	 *             and only if the key is found.
+	 * @see java.util.Arrays
+	 */
+	public static KEY_GENERIC int binarySearch( final KEY_GENERIC_TYPE[] a, int from, int to, final KEY_GENERIC_TYPE key, final KEY_COMPARATOR KEY_GENERIC c ) {
+		KEY_GENERIC_TYPE midVal;
+		to--;
+		while (from <= to) {
+			final int mid = (from + to) >>> 1;
+			midVal = a[ mid ];
+			final int cmp = c.compare( midVal, key );
+			if ( cmp < 0 ) from = mid + 1;
+			else if (cmp > 0) to = mid - 1;
+			else return mid; // key found
+		}
+		return -( from + 1 );
+	}
+
+	/**
+	 * Searches an array for the specified value using 
+	 * the binary search algorithm and a specified comparator. The array must be sorted following the comparator prior to making this call. 
+	 * If it is not sorted, the results are undefined. If the array contains multiple elements with 
+	 * the specified value, there is no guarantee which one will be found.
+	 *
+	 * @param a the array to be searched.
+	 * @param key the value to be searched for.
+	 * @param c a comparator.
+	 * @return index of the search key, if it is contained in the array;
+	 *             otherwise, <samp>(-(<i>insertion point</i>) - 1)</samp>.  The <i>insertion
+	 *             point</i> is defined as the point at which the value would
+	 *             be inserted into the array: the index of the first
+	 *             element greater than the key, or the length of the array, if all
+	 *             elements in the array are less than the specified key.  Note
+	 *             that this guarantees that the return value will be >= 0 if
+	 *             and only if the key is found.
+	 * @see java.util.Arrays
+	 */
+	public static KEY_GENERIC int binarySearch( final KEY_GENERIC_TYPE[] a, final KEY_GENERIC_TYPE key, final KEY_COMPARATOR KEY_GENERIC c ) {
+		return binarySearch( a, 0, a.length, key, c );
+	}
+
+
+#if #keys(primitive)
+	/** The size of a digit used during radix sort (must be a power of 2). */
+	private static final int DIGIT_BITS = 8;
+	/** The mask to extract a digit of {@link #DIGIT_BITS} bits. */
+	private static final int DIGIT_MASK = ( 1 << DIGIT_BITS ) - 1;
+	/** The number of digits per element. */
+	private static final int DIGITS_PER_ELEMENT = KEY_CLASS.SIZE / DIGIT_BITS;
+
+	/** This method fixes negative numbers so that the combination exponent/significand is lexicographically sorted. */
+#if #keyclass(Double)
+	private static final long fixDouble( final double d ) {
+		final long l = Double.doubleToLongBits( d );
+		return l >= 0 ? l : l ^ 0x7FFFFFFFFFFFFFFFL;
+	}	   
+#elif #keyclass(Float)
+	private static final long fixFloat( final float f ) {
+		final long i = Float.floatToIntBits( f );
+		return i >= 0 ? i : i ^ 0x7FFFFFFF;
+	}
+#endif
+
+
+	/** Sorts the specified array using radix sort.
+	 * 
+	 * <p>The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas
+	 * McIlroy, “Engineering radix sort”, <i>Computing Systems</i>, 6(1), pages 5−27 (1993),
+	 * and further improved using the digit-oracle idea described by
+	 * Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”,
+	 * <i>String Processing and Information Retrieval, 15th International Symposium</i>, volume 5280 of
+	 * Lecture Notes in Computer Science, pages 3−14, Springer (2008).
+	 *
+	 * <p>This implementation is significantly faster than quicksort 
+	 * already at small sizes (say, more than 10000 elements), but it can only
+	 * sort in ascending order. 
+	 * It will allocate a support array of bytes with the same number of elements as the array to be sorted.
+	 * 
+	 * @param a the array to be sorted.
+	 */
+	public static void radixSort( final KEY_TYPE[] a ) {
+		radixSort( a, 0, a.length );
+	}
+
+	/** Sorts the specified array using radix sort.
+	 * 
+	 * <p>The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas
+	 * McIlroy, “Engineering radix sort”, <i>Computing Systems</i>, 6(1), pages 5−27 (1993),
+	 * and further improved using the digit-oracle idea described by
+	 * Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”,
+	 * <i>String Processing and Information Retrieval, 15th International Symposium</i>, volume 5280 of
+	 * Lecture Notes in Computer Science, pages 3−14, Springer (2008).
+	 *
+	 * <p>This implementation is significantly faster than quicksort 
+	 * already at small sizes (say, more than 10000 elements), but it can only
+	 * sort in ascending order. 
+	 * It will allocate a support array of bytes with the same number of elements as the array to be sorted.
+	 * 
+	 * @param a the array to be sorted.
+	 * @param from the index of the first element (inclusive) to be sorted.
+	 * @param to the index of the last element (exclusive) to be sorted.
+	 */
+	public static void radixSort( final KEY_TYPE[] a, final int from, final int to ) {
+		final int maxLevel = DIGITS_PER_ELEMENT - 1;
+
+		final int stackSize = ( ( 1 << DIGIT_BITS ) - 1 ) * ( DIGITS_PER_ELEMENT - 1 ) + 1;
+		final int[] offsetStack = new int[ stackSize ];
+		int offsetPos = 0;
+		final int[] lengthStack = new int[ stackSize ];
+		int lengthPos = 0;
+		final int[] levelStack = new int[ stackSize ];
+		int levelPos = 0;
+		
+		offsetStack[ offsetPos++ ] = from;
+		lengthStack[ lengthPos++ ] = to - from;
+		levelStack[ levelPos++ ] = 0;
+
+		final int[] count = new int[ 1 << DIGIT_BITS ];
+		final int[] pos = new int[ 1 << DIGIT_BITS ];
+		final byte[] digit = new byte[ to - from ];
+
+		while( offsetPos > 0 ) {
+			final int first = offsetStack[ --offsetPos ];
+			final int length = lengthStack[ --lengthPos ];
+			final int level = levelStack[ --levelPos ];
+#if #keyclass(Character)
+			final int signMask = 0;
+#else
+			final int signMask = level % DIGITS_PER_ELEMENT == 0 ? 1 << DIGIT_BITS - 1 : 0;
+#endif
+			
+			if ( length < MEDIUM ) {
+				selectionSort( a, first, first + length );
+				continue;
+			}
+			
+			final int shift = ( DIGITS_PER_ELEMENT - 1 - level % DIGITS_PER_ELEMENT ) * DIGIT_BITS; // This is the shift that extracts the right byte from a key
+
+			// Count keys.
+
+			for( int i = length; i-- != 0; ) digit[ i ] = (byte)( ( ( KEY2LEXINT( a[ first + i ] ) >>> shift ) & DIGIT_MASK ) ^ signMask );
+			for( int i = length; i-- != 0; ) count[ digit[ i ] & 0xFF ]++;
+			// Compute cumulative distribution and push non-singleton keys on stack.
+			int lastUsed = -1;
+			
+			for( int i = 0, p = 0; i < 1 << DIGIT_BITS; i++ ) {
+				if ( count[ i ] != 0 ) {
+					lastUsed = i;
+					if ( level < maxLevel && count[ i ] > 1 ){
+						//System.err.println( " Pushing " + new StackEntry( first + pos[ i - 1 ], first + pos[ i ], level + 1 ) );
+						offsetStack[ offsetPos++ ] = p + first;
+						lengthStack[ lengthPos++ ] = count[ i ];
+						levelStack[ levelPos++ ] = level + 1;
+					}
+				}
+				pos[ i ] = ( p += count[ i ] );
+			}
+			
+			// When all slots are OK, the last slot is necessarily OK.
+			final int end = length - count[ lastUsed ];
+			count[ lastUsed ] = 0;
+
+			// i moves through the start of each block
+			for( int i = 0, c = -1, d; i < end; i += count[ c ], count[ c ] = 0 ) {
+				KEY_TYPE t = a[ i + first ];
+				c = digit[ i ] & 0xFF;
+				while( ( d = --pos[ c ] ) > i ) {
+					final KEY_TYPE z = t;
+					final int zz = c;
+					t = a[ d + first ];
+					c = digit[ d ] & 0xFF;
+					a[ d + first ] = z;
+					digit[ d ] = (byte)zz;
+				}
+
+				a[ i + first ] = t;
+			}
+		}
+	}
+
+
+
+	private static KEY_GENERIC void insertionSortIndirect( final int[] perm, final KEY_TYPE[] a, final int from, final int to ) {
+		for ( int i = from; ++i < to; ) { 
+			int t = perm[ i ];
+			int j = i;
+			for ( int u = perm[ j - 1 ]; KEY_LESS( a[ t ], a[ u ] ); u = perm[ --j - 1 ] ) {
+				perm[ j ] = u;
+				if ( from == j - 1 ) {
+					--j;
+					break;
+				}
+			}
+			perm[ j ] = t;
+		}
+	}
+
+	/** Sorts the specified array using indirect radix sort.
+	 * 
+	 * <p>The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas
+	 * McIlroy, “Engineering radix sort”, <i>Computing Systems</i>, 6(1), pages 5−27 (1993),
+	 * and further improved using the digit-oracle idea described by
+	 * Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”,
+	 * <i>String Processing and Information Retrieval, 15th International Symposium</i>, volume 5280 of
+	 * Lecture Notes in Computer Science, pages 3−14, Springer (2008).
+	 *
+	 * <p>This method implements an <em>indirect</em> sort. The elements of <code>perm</code> (which must
+	 * be exactly the numbers in the interval <code>[0..perm.length)</code>) will be permuted so that
+	 * <code>a[ perm[ i ] ] <= a[ perm[ i + 1 ] ]</code>.
+	 *
+	 * <p>This implementation is significantly faster than quicksort (unstable) or mergesort (stable)
+	 * already at small sizes (say, more than 10000 elements), but it can only
+	 * sort in ascending order. 
+	 * It will allocate a support array of bytes with the same number of elements as the array to be sorted,
+	 * and, in the stable case, a further support array as large as <code>perm</code> (note that the stable
+	 * version is slightly faster).
+	 * 
+	 * @param perm a permutation array indexing <code>a</code>.
+	 * @param a the array to be sorted.
+	 * @param stable whether the sorting algorithm should be stable.
+	 */
+	public static void radixSortIndirect( final int[] perm, final KEY_TYPE[] a, final boolean stable ) {
+		radixSortIndirect( perm, a, 0, perm.length, stable );
+	}
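+
+	/* A minimal indirect-sort sketch, assuming the int specialization produced from this template
+	 * (it.unimi.dsi.fastutil.ints.IntArrays); the keys in a are left untouched and only the
+	 * permutation array is rearranged:
+	 *
+	 *   int[] a = { 30, 10, 20 };
+	 *   int[] perm = { 0, 1, 2 };   // must be exactly the numbers in [0..a.length)
+	 *   IntArrays.radixSortIndirect( perm, a, true );
+	 *   // perm is now { 1, 2, 0 }, so a[ perm[ 0 ] ] <= a[ perm[ 1 ] ] <= a[ perm[ 2 ] ].
+	 */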
+
+	/** Sorts the specified array using indirect radix sort.
+	 * 
+	 * <p>The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas
+	 * McIlroy, “Engineering radix sort”, <i>Computing Systems</i>, 6(1), pages 5−27 (1993),
+	 * and further improved using the digit-oracle idea described by
+	 * Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”,
+	 * <i>String Processing and Information Retrieval, 15th International Symposium</i>, volume 5280 of
+	 * Lecture Notes in Computer Science, pages 3−14, Springer (2008).
+	 *
+	 * <p>This method implements an <em>indirect</em> sort. The elements of <code>perm</code> (which must
+	 * be exactly the numbers in the interval <code>[0..perm.length)</code>) will be permuted so that
+	 * <code>a[ perm[ i ] ] <= a[ perm[ i + 1 ] ]</code>.
+	 *
+	 * <p>This implementation is significantly faster than quicksort (unstable) or mergesort (stable)
+	 * already at small sizes (say, more than 10000 elements), but it can only
+	 * sort in ascending order. 
+	 * It will allocate a support array of bytes with the same number of elements as the array to be sorted,
+	 * and, in the stable case, a further support array as large as <code>perm</code> (note that the stable
+	 * version is slightly faster).
+	 * 
+	 * @param perm a permutation array indexing <code>a</code>.
+	 * @param a the array to be sorted.
+	 * @param from the index of the first element of <code>perm</code> (inclusive) to be permuted.
+	 * @param to the index of the last element of <code>perm</code> (exclusive) to be permuted.
+	 * @param stable whether the sorting algorithm should be stable.
+	 */
+	public static void radixSortIndirect( final int[] perm, final KEY_TYPE[] a, final int from, final int to, final boolean stable ) {
+		final int maxLevel = DIGITS_PER_ELEMENT - 1;
+
+		final int stackSize = ( ( 1 << DIGIT_BITS ) - 1 ) * ( DIGITS_PER_ELEMENT - 1 ) + 1;
+		final int[] offsetStack = new int[ stackSize ];
+		int offsetPos = 0;
+		final int[] lengthStack = new int[ stackSize ];
+		int lengthPos = 0;
+		final int[] levelStack = new int[ stackSize ];
+		int levelPos = 0;
+		
+		offsetStack[ offsetPos++ ] = from;
+		lengthStack[ lengthPos++ ] = to - from;
+		levelStack[ levelPos++ ] = 0;
+
+		final int[] count = new int[ 1 << DIGIT_BITS ];
+		final int[] pos = stable ? null : new int[ 1 << DIGIT_BITS ];
+		final int[] support = stable ? new int[ perm.length ] : null;
+		final byte[] digit = new byte[ to - from ];
+
+		while( offsetPos > 0 ) {
+			final int first = offsetStack[ --offsetPos ];
+			final int length = lengthStack[ --lengthPos ];
+			final int level = levelStack[ --levelPos ];
+#if #keyclass(Character)
+			final int signMask = 0;
+#else
+			final int signMask = level % DIGITS_PER_ELEMENT == 0 ? 1 << DIGIT_BITS - 1 : 0;
+#endif
+			
+			if ( length < MEDIUM ) {
+				insertionSortIndirect( perm, a, first, first + length );
+				continue;
+			}
+			
+			final int shift = ( DIGITS_PER_ELEMENT - 1 - level % DIGITS_PER_ELEMENT ) * DIGIT_BITS; // This is the shift that extracts the right byte from a key
+
+			// Count keys.
+			for( int i = length; i-- != 0; ) digit[ i ] = (byte)( ( ( KEY2LEXINT( a[ perm[ first + i ] ] ) >>> shift ) & DIGIT_MASK ) ^ signMask );
+			for( int i = length; i-- != 0; ) count[ digit[ i ] & 0xFF ]++;
+			// Compute cumulative distribution and push non-singleton keys on stack.
+			int lastUsed = -1;
+			
+			for( int i = 0, p = 0; i < 1 << DIGIT_BITS; i++ ) {
+				if ( count[ i ] != 0 ) {
+					lastUsed = i;
+					if ( level < maxLevel && count[ i ] > 1 ){
+						offsetStack[ offsetPos++ ] = p + first;
+						lengthStack[ lengthPos++ ] = count[ i ];
+						levelStack[ levelPos++ ] = level + 1;
+					}
+				}
+				if ( stable ) count[ i ] = p += count[ i ];
+				else pos[ i ] = ( p += count[ i ] );
+			}
+			
+			if ( stable ) {
+				for( int i = length; i-- != 0; ) support[ --count[ digit[ i ] & 0xFF ] ] = perm[ first + i ];
+				System.arraycopy( support, 0, perm, first, length );
+				it.unimi.dsi.fastutil.ints.IntArrays.fill( count, 0 );
+			}
+			else {
+				// When all slots are OK, the last slot is necessarily OK.
+				final int end = length - count[ lastUsed ];
+				count[ lastUsed ] = 0;
+				// i moves through the start of each block
+				for( int i = 0, c = -1, d; i < end; i += count[ c ], count[ c ] = 0 ) {
+					int t = perm[ i + first ];
+					c = digit[ i ] & 0xFF;
+					while( ( d = --pos[ c ] ) > i ) {
+						final int z = t;
+						final int zz = c;
+						t = perm[ d + first ];
+						c = digit[ d ] & 0xFF;
+						perm[ d + first ] = z;
+						digit[ d ] = (byte)zz;
+					}
+
+					perm[ i + first ] = t;
+				}
+			}
+		}
+	}
+
+
+
+	private static void selectionSort( final KEY_TYPE[] a, final KEY_TYPE[] b, final int from, final int to ) {
+		for( int i = from; i < to - 1; i++ ) {
+			int m = i;
+			for( int j = i + 1; j < to; j++ ) 
+				if ( a[ j ] < a[ m ] || a[ j ] == a[ m ] && b[ j ] < b[ m ] ) m = j;
+			
+			if ( m != i ) {
+				KEY_TYPE t = a[ i ];
+				a[ i ] = a[ m ];
+				a[ m ] = t;
+				t = b[ i ];
+				b[ i ] = b[ m ];
+				b[ m ] = t;
+			}
+		}
+	}
+
+	/** Sorts the specified pair of arrays lexicographically using radix sort.
+	 * 
+	 * <p>The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas
+	 * McIlroy, “Engineering radix sort”, <i>Computing Systems</i>, 6(1), pages 5−27 (1993),
+	 * and further improved using the digit-oracle idea described by
+	 * Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”,
+	 * <i>String Processing and Information Retrieval, 15th International Symposium</i>, volume 5280 of
+	 * Lecture Notes in Computer Science, pages 3−14, Springer (2008).
+	 *
+	 * <p>This method implements a <em>lexicographical</em> sorting of the arguments. Pairs of elements
+	 * in the same position in the two provided arrays will be considered a single key, and permuted
+	 * accordingly. In the end, either <code>a[ i ] < a[ i + 1 ]</code> or <code>a[ i ] == a[ i + 1 ]</code> and <code>b[ i ] <= b[ i + 1 ]</code>.
+	 *
+	 * <p>This implementation is significantly faster than quicksort 
+	 * already at small sizes (say, more than 10000 elements), but it can only
+	 * sort in ascending order. It will allocate a support array of bytes with the same number of elements as the arrays to be sorted.
+	 * 
+	 * @param a the first array to be sorted.
+	 * @param b the second array to be sorted.
+	 */
+
+	public static void radixSort( final KEY_TYPE[] a, final KEY_TYPE[] b ) {
+		radixSort( a, b, 0, a.length );
+	}
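+
+	/* A minimal lexicographical-sort sketch, assuming the int specialization produced from this
+	 * template (it.unimi.dsi.fastutil.ints.IntArrays); pairs ( a[ i ], b[ i ] ) move in parallel:
+	 *
+	 *   int[] a = { 2, 1, 2 };
+	 *   int[] b = { 5, 9, 3 };
+	 *   IntArrays.radixSort( a, b );
+	 *   // a is now { 1, 2, 2 } and b is now { 9, 3, 5 }.
+	 */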
+	
+	/** Sorts the specified pair of arrays lexicographically using radix sort.
+	 * 
+	 * <p>The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas
+	 * McIlroy, “Engineering radix sort”, <i>Computing Systems</i>, 6(1), pages 5−27 (1993),
+	 * and further improved using the digit-oracle idea described by
+	 * Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”,
+	 * <i>String Processing and Information Retrieval, 15th International Symposium</i>, volume 5280 of
+	 * Lecture Notes in Computer Science, pages 3−14, Springer (2008).
+	 *
+	 * <p>This method implements a <em>lexicographical</em> sorting of the arguments. Pairs of elements
+	 * in the same position in the two provided arrays will be considered a single key, and permuted
+	 * accordingly. In the end, either <code>a[ i ] < a[ i + 1 ]</code> or <code>a[ i ] == a[ i + 1 ]</code> and <code>b[ i ] <= b[ i + 1 ]</code>.
+	 *
+	 * <p>This implementation is significantly faster than quicksort 
+	 * already at small sizes (say, more than 10000 elements), but it can only
+	 * sort in ascending order. It will allocate a support array of bytes with the same number of elements as the arrays to be sorted.
+	 * 
+	 * @param a the first array to be sorted.
+	 * @param b the second array to be sorted.
+	 * @param from the index of the first element (inclusive) to be sorted.
+	 * @param to the index of the last element (exclusive) to be sorted.
+	 */
+	public static void radixSort( final KEY_TYPE[] a, final KEY_TYPE[] b, final int from, final int to ) {
+		final int layers = 2;
+		if ( a.length != b.length ) throw new IllegalArgumentException( "Array size mismatch." );
+		final int maxLevel = DIGITS_PER_ELEMENT * layers - 1;
+		
+		final int stackSize = ( ( 1 << DIGIT_BITS ) - 1 ) * ( layers * DIGITS_PER_ELEMENT - 1 ) + 1;
+		final int[] offsetStack = new int[ stackSize ];
+		int offsetPos = 0;
+		final int[] lengthStack = new int[ stackSize ];
+		int lengthPos = 0;
+		final int[] levelStack = new int[ stackSize ];
+		int levelPos = 0;
+		
+		offsetStack[ offsetPos++ ] = from;
+		lengthStack[ lengthPos++ ] = to - from;
+		levelStack[ levelPos++ ] = 0;
+
+		final int[] count = new int[ 1 << DIGIT_BITS ];
+		final int[] pos = new int[ 1 << DIGIT_BITS ];
+		final byte[] digit = new byte[ to - from ];
+
+		while( offsetPos > 0 ) {
+			final int first = offsetStack[ --offsetPos ];
+			final int length = lengthStack[ --lengthPos ];
+			final int level = levelStack[ --levelPos ];
+#if #keyclass(Character)
+			final int signMask = 0;
+#else
+			final int signMask = level % DIGITS_PER_ELEMENT == 0 ? 1 << DIGIT_BITS - 1 : 0;
+#endif
+			
+			if ( length < MEDIUM ) {
+				selectionSort( a, b, first, first + length );
+				continue;
+			}
+			
+			final KEY_TYPE[] k = level < DIGITS_PER_ELEMENT ? a : b; // This is the key array
+			final int shift = ( DIGITS_PER_ELEMENT - 1 - level % DIGITS_PER_ELEMENT ) * DIGIT_BITS; // This is the shift that extracts the right byte from a key
+
+			// Count keys.
+			for( int i = length; i-- != 0; ) digit[ i ] = (byte)( ( ( KEY2LEXINT( k[ first + i ] ) >>> shift ) & DIGIT_MASK ) ^ signMask );
+			for( int i = length; i-- != 0; ) count[ digit[ i ] & 0xFF ]++;
+			// Compute cumulative distribution and push non-singleton keys on stack.
+			int lastUsed = -1;
+
+			for( int i = 0, p = 0; i < 1 << DIGIT_BITS; i++ ) {
+				if ( count[ i ] != 0 ) {
+					lastUsed = i;
+					if ( level < maxLevel && count[ i ] > 1 ){
+						offsetStack[ offsetPos++ ] = p + first;
+						lengthStack[ lengthPos++ ] = count[ i ];
+						levelStack[ levelPos++ ] = level + 1;
+					}
+				}
+				pos[ i ] = ( p += count[ i ] );
+			}
+
+			// When all slots are OK, the last slot is necessarily OK.
+			final int end = length - count[ lastUsed ];
+			count[ lastUsed ] = 0;
+			
+			// i moves through the start of each block
+			for( int i = 0, c = -1, d; i < end; i += count[ c ], count[ c ] = 0 ) {
+				KEY_TYPE t = a[ i + first ];
+				KEY_TYPE u = b[ i + first ];
+				c = digit[ i ] & 0xFF;
+				while( ( d = --pos[ c ] ) > i ) {
+					KEY_TYPE z = t;
+					final int zz = c;
+					t = a[ d + first ];
+					a[ d + first ] = z;
+					z = u;
+					u = b[ d + first ];
+					b[ d + first ] = z;
+					c = digit[ d ] & 0xFF;
+					digit[ d ] = (byte)zz;
+				}
+
+				a[ i + first ] = t;
+				b[ i + first ] = u;
+			}
+		}
+	}
+
+
+
+
+	private static KEY_GENERIC void insertionSortIndirect( final int[] perm, final KEY_TYPE[] a, final KEY_TYPE[] b, final int from, final int to ) {
+		for ( int i = from; ++i < to; ) { 
+			int t = perm[ i ];
+			int j = i;
+			for ( int u = perm[ j - 1 ]; KEY_LESS( a[ t ], a[ u ] ) || KEY_CMP_EQ( a[ t ], a[ u ] ) && KEY_LESS( b[ t ], b[ u ] ); u = perm[ --j - 1 ] ) {
+				perm[ j ] = u;
+				if ( from == j - 1 ) {
+					--j;
+					break;
+				}
+			}
+			perm[ j ] = t;
+		}
+	}
+
+	/** Sorts the specified pair of arrays lexicographically using indirect radix sort.
+	 * 
+	 * <p>The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas
+	 * McIlroy, “Engineering radix sort”, <i>Computing Systems</i>, 6(1), pages 5−27 (1993),
+	 * and further improved using the digit-oracle idea described by
+	 * Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”,
+	 * <i>String Processing and Information Retrieval, 15th International Symposium</i>, volume 5280 of
+	 * Lecture Notes in Computer Science, pages 3−14, Springer (2008).
+	 *
+	 * <p>This method implements an <em>indirect</em> lexicographical sort. The elements of <code>perm</code> (which must
+	 * be exactly the numbers in the interval <code>[0..perm.length)</code>) will be permuted so that, in the end,
+	 * either <code>a[ perm[ i ] ] < a[ perm[ i + 1 ] ]</code>, or <code>a[ perm[ i ] ] == a[ perm[ i + 1 ] ]</code> and <code>b[ perm[ i ] ] <= b[ perm[ i + 1 ] ]</code>.
+	 *
+	 * <p>This implementation is significantly faster than quicksort (unstable) or mergesort (stable)
+	 * already at small sizes (say, more than 10000 elements), but it can only
+	 * sort in ascending order. 
+	 * It will allocate a support array of bytes with the same number of elements as the array to be sorted,
+	 * and, in the stable case, a further support array as large as <code>perm</code> (note that the stable
+	 * version is slightly faster).
+	 * 
+	 * @param perm a permutation array indexing <code>a</code>.
+	 * @param a the array to be sorted.
+	 * @param b the second array to be sorted.
+	 * @param stable whether the sorting algorithm should be stable.
+	 */
+	public static void radixSortIndirect( final int[] perm, final KEY_TYPE[] a, final KEY_TYPE[] b, final boolean stable ) {
+		radixSortIndirect( perm, a, b, 0, perm.length, stable );
+	}
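+
+	/* A minimal indirect lexicographical-sort sketch, assuming the int specialization produced from
+	 * this template (it.unimi.dsi.fastutil.ints.IntArrays); a and b are left untouched:
+	 *
+	 *   int[] a = { 2, 1, 2 };
+	 *   int[] b = { 5, 9, 3 };
+	 *   int[] perm = { 0, 1, 2 };
+	 *   IntArrays.radixSortIndirect( perm, a, b, true );
+	 *   // perm is now { 1, 2, 0 }: the pairs in perm order are ( 1, 9 ), ( 2, 3 ), ( 2, 5 ).
+	 */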
+
+	/** Sorts the specified pair of arrays lexicographically using indirect radix sort.
+	 * 
+	 * <p>The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas
+	 * McIlroy, “Engineering radix sort”, <i>Computing Systems</i>, 6(1), pages 5−27 (1993),
+	 * and further improved using the digit-oracle idea described by
+	 * Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”,
+	 * <i>String Processing and Information Retrieval, 15th International Symposium</i>, volume 5280 of
+	 * Lecture Notes in Computer Science, pages 3−14, Springer (2008).
+	 *
+	 * <p>This method implements an <em>indirect</em> lexicographical sort. The elements of <code>perm</code> (which must
+	 * be exactly the numbers in the interval <code>[0..perm.length)</code>) will be permuted so that, in the end,
+	 * either <code>a[ perm[ i ] ] < a[ perm[ i + 1 ] ]</code>, or <code>a[ perm[ i ] ] == a[ perm[ i + 1 ] ]</code> and <code>b[ perm[ i ] ] <= b[ perm[ i + 1 ] ]</code>.
+	 *
+	 * <p>This implementation is significantly faster than quicksort (unstable) or mergesort (stable)
+	 * already at small sizes (say, more than 10000 elements), but it can only
+	 * sort in ascending order. 
+	 * It will allocate a support array of bytes with the same number of elements as the array to be sorted,
+	 * and, in the stable case, a further support array as large as <code>perm</code> (note that the stable
+	 * version is slightly faster).
+	 * 
+	 * @param perm a permutation array indexing <code>a</code>.
+	 * @param a the array to be sorted.
+	 * @param b the second array to be sorted.
+	 * @param from the index of the first element of <code>perm</code> (inclusive) to be permuted.
+	 * @param to the index of the last element of <code>perm</code> (exclusive) to be permuted.
+	 * @param stable whether the sorting algorithm should be stable.
+	 */
+	public static void radixSortIndirect( final int[] perm, final KEY_TYPE[] a, final KEY_TYPE[] b, final int from, final int to, final boolean stable ) {
+		final int layers = 2;
+		if ( a.length != b.length ) throw new IllegalArgumentException( "Array size mismatch." );
+		final int maxLevel = DIGITS_PER_ELEMENT * layers - 1;
+		
+		final int stackSize = ( ( 1 << DIGIT_BITS ) - 1 ) * ( layers * DIGITS_PER_ELEMENT - 1 ) + 1;
+		final int[] offsetStack = new int[ stackSize ];
+		int offsetPos = 0;
+		final int[] lengthStack = new int[ stackSize ];
+		int lengthPos = 0;
+		final int[] levelStack = new int[ stackSize ];
+		int levelPos = 0;
+		
+		offsetStack[ offsetPos++ ] = from;
+		lengthStack[ lengthPos++ ] = to - from;
+		levelStack[ levelPos++ ] = 0;
+
+		final int[] count = new int[ 1 << DIGIT_BITS ];
+		final int[] pos = stable ? null : new int[ 1 << DIGIT_BITS ];
+		final int[] support = stable ? new int[ perm.length ] : null;
+		final byte[] digit = new byte[ to - from ];
+
+		while( offsetPos > 0 ) {
+			final int first = offsetStack[ --offsetPos ];
+			final int length = lengthStack[ --lengthPos ];
+			final int level = levelStack[ --levelPos ];
+#if #keyclass(Character)
+			final int signMask = 0;
+#else
+			final int signMask = level % DIGITS_PER_ELEMENT == 0 ? 1 << DIGIT_BITS - 1 : 0;
+#endif
+			
+			if ( length < MEDIUM ) {
+				insertionSortIndirect( perm, a, b, first, first + length );
+				continue;
+			}
+			
+			final KEY_TYPE[] k = level < DIGITS_PER_ELEMENT ? a : b; // This is the key array
+			final int shift = ( DIGITS_PER_ELEMENT - 1 - level % DIGITS_PER_ELEMENT ) * DIGIT_BITS; // This is the shift that extracts the right byte from a key
+
+			// Count keys.
+			for( int i = length; i-- != 0; ) digit[ i ] = (byte)( ( ( KEY2LEXINT( k[ perm[ first + i ] ] ) >>> shift ) & DIGIT_MASK ) ^ signMask );
+			for( int i = length; i-- != 0; ) count[ digit[ i ] & 0xFF ]++;
+			// Compute cumulative distribution and push non-singleton keys on stack.
+			int lastUsed = -1;
+			
+			for( int i = 0, p = 0; i < 1 << DIGIT_BITS; i++ ) {
+				if ( count[ i ] != 0 ) {
+					lastUsed = i;
+					if ( level < maxLevel && count[ i ] > 1 ){
+						offsetStack[ offsetPos++ ] = p + first;
+						lengthStack[ lengthPos++ ] = count[ i ];
+						levelStack[ levelPos++ ] = level + 1;
+					}
+				}
+				if ( stable ) count[ i ] = p += count[ i ];
+				else pos[ i ] = ( p += count[ i ] );
+			}
+			
+			if ( stable ) {
+				for( int i = length; i-- != 0; ) support[ --count[ digit[ i ] & 0xFF ] ] = perm[ first + i ];
+				System.arraycopy( support, 0, perm, first, length );
+				it.unimi.dsi.fastutil.ints.IntArrays.fill( count, 0 );
+			}
+			else {
+				// When all slots are OK, the last slot is necessarily OK.
+				final int end = length - count[ lastUsed ];
+				count[ lastUsed ] = 0;
+				// i moves through the start of each block
+				for( int i = 0, c = -1, d; i < end; i += count[ c ], count[ c ] = 0 ) {
+					int t = perm[ i + first ];
+					c = digit[ i ] & 0xFF;
+					while( ( d = --pos[ c ] ) > i ) {
+						final int z = t;
+						final int zz = c;
+						t = perm[ d + first ];
+						c = digit[ d ] & 0xFF;
+						perm[ d + first ] = z;
+						digit[ d ] = (byte)zz;
+					}
+
+					perm[ i + first ] = t;
+				}
+			}
+		}
+	}
+
+
+
+
+	private static void selectionSort( final KEY_TYPE[][] a, final int from, final int to, final int level ) {
+		final int layers = a.length;
+		final int firstLayer = level / DIGITS_PER_ELEMENT;
+
+		for( int i = from; i < to - 1; i++ ) {
+			int m = i;
+			for( int j = i + 1; j < to; j++ ) {
+				for( int p = firstLayer; p < layers; p++ ) {
+					if ( a[ p ][ j ] < a[ p ][ m ] ) {
+						m = j;
+						break;
+					}
+					else if ( a[ p ][ j ] > a[ p ][ m ] ) break;
+				}
+			}
+			if ( m != i ) {
+				for( int p = layers; p-- != 0; ) {
+					final KEY_TYPE u = a[ p ][ i ];
+					a[ p ][ i ] = a[ p ][ m ];
+					a[ p ][ m ] = u;
+				}
+			}
+		}
+	}
+
+
+	
+	/** Sorts the specified array of arrays lexicographically using radix sort.
+	 * 
+	 * <p>The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas
+	 * McIlroy, “Engineering radix sort”, <i>Computing Systems</i>, 6(1), pages 5−27 (1993),
+	 * and further improved using the digit-oracle idea described by
+	 * Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”,
+	 * <i>String Processing and Information Retrieval, 15th International Symposium</i>, volume 5280 of
+	 * Lecture Notes in Computer Science, pages 3−14, Springer (2008).
+	 *
+	 * <p>This method implements a <em>lexicographical</em> sorting of the provided arrays. Tuples of elements
+	 * in the same position will be considered a single key, and permuted
+	 * accordingly.
+	 *
+	 * <p>This implementation is significantly faster than quicksort 
+	 * already at small sizes (say, more than 10000 elements), but it can only
+	 * sort in ascending order. It will allocate a support array of bytes with the same number of elements as the arrays to be sorted.
+	 * 
+	 * @param a an array containing arrays of equal length to be sorted lexicographically in parallel.
+	 */
+	public static void radixSort( final KEY_TYPE[][] a ) {
+		radixSort( a, 0, a[ 0 ].length );
+	}
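+
+	/* A minimal sketch for the array-of-arrays case, assuming the int specialization produced from
+	 * this template (it.unimi.dsi.fastutil.ints.IntArrays); position i across all rows forms one key:
+	 *
+	 *   int[][] a = { { 1, 0, 1 }, { 7, 9, 2 } };   // keys are ( 1, 7 ), ( 0, 9 ), ( 1, 2 )
+	 *   IntArrays.radixSort( a );
+	 *   // a is now { { 0, 1, 1 }, { 9, 2, 7 } }.
+	 */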
+
+	/** Sorts the specified array of arrays lexicographically using radix sort.
+	 * 
+	 * <p>The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas
+	 * McIlroy, “Engineering radix sort”, <i>Computing Systems</i>, 6(1), pages 5−27 (1993),
+	 * and further improved using the digit-oracle idea described by
+	 * Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”,
+	 * <i>String Processing and Information Retrieval, 15th International Symposium</i>, volume 5280 of
+	 * Lecture Notes in Computer Science, pages 3−14, Springer (2008).
+	 *
+	 * <p>This method implements a <em>lexicographical</em> sorting of the provided arrays. Tuples of elements
+	 * in the same position will be considered a single key, and permuted
+	 * accordingly.
+	 *
+	 * <p>This implementation is significantly faster than quicksort 
+	 * already at small sizes (say, more than 10000 elements), but it can only
+	 * sort in ascending order. It will allocate a support array of bytes with the same number of elements as the arrays to be sorted.
+	 * 
+	 * @param a an array containing arrays of equal length to be sorted lexicographically in parallel.
+	 * @param from the index of the first element (inclusive) to be sorted.
+	 * @param to the index of the last element (exclusive) to be sorted.
+	 */
+	public static void radixSort( final KEY_TYPE[][] a, final int from, final int to ) {
+		final int layers = a.length;
+		final int maxLevel = DIGITS_PER_ELEMENT * layers - 1;
+		for( int p = layers, l = a[ 0 ].length; p-- != 0; ) if ( a[ p ].length != l ) throw new IllegalArgumentException( "The array of index " + p + " does not have the same length as the array of index 0." );
+
+		final int stackSize = ( ( 1 << DIGIT_BITS ) - 1 ) * ( layers * DIGITS_PER_ELEMENT - 1 ) + 1;
+		final int[] offsetStack = new int[ stackSize ];
+		int offsetPos = 0;
+		final int[] lengthStack = new int[ stackSize ];
+		int lengthPos = 0;
+		final int[] levelStack = new int[ stackSize ];
+		int levelPos = 0;
+		
+		offsetStack[ offsetPos++ ] = from;
+		lengthStack[ lengthPos++ ] = to - from;
+		levelStack[ levelPos++ ] = 0;
+
+		final int[] count = new int[ 1 << DIGIT_BITS ];
+		final int[] pos = new int[ 1 << DIGIT_BITS ];
+		final byte[] digit = new byte[ to - from ];
+		final KEY_TYPE[] t = new KEY_TYPE[ layers ];
+
+		while( offsetPos > 0 ) {
+			final int first = offsetStack[ --offsetPos ];
+			final int length = lengthStack[ --lengthPos ];
+			final int level = levelStack[ --levelPos ];
+#if #keyclass(Character)
+			final int signMask = 0;
+#else
+			final int signMask = level % DIGITS_PER_ELEMENT == 0 ? 1 << DIGIT_BITS - 1 : 0;
+#endif
+			
+			if ( length < MEDIUM ) {
+				selectionSort( a, first, first + length, level );
+				continue;
+			}
+			
+			final KEY_TYPE[] k = a[ level / DIGITS_PER_ELEMENT ]; // This is the key array
+			final int shift = ( DIGITS_PER_ELEMENT - 1 - level % DIGITS_PER_ELEMENT ) * DIGIT_BITS; // This is the shift that extracts the right byte from a key
+
+			// Count keys.
+			for( int i = length; i-- != 0; ) digit[ i ] = (byte)( ( KEY2LEXINT( k[ first + i ] ) >>> shift & DIGIT_MASK ) ^ signMask );
+			for( int i = length; i-- != 0; ) count[ digit[ i ] & 0xFF ]++;
+			// Compute cumulative distribution and push non-singleton keys on stack.
+			int lastUsed = -1;
+			
+			for( int i = 0, p = 0; i < 1 << DIGIT_BITS; i++ ) {
+				if ( count[ i ] != 0 ) {
+					lastUsed = i;
+					if ( level < maxLevel && count[ i ] > 1 ){
+						offsetStack[ offsetPos++ ] = p + first;
+						lengthStack[ lengthPos++ ] = count[ i ];
+						levelStack[ levelPos++ ] = level + 1;
+					}
+				}
+				pos[ i ] = ( p += count[ i ] );
+			}
+
+			// When all slots are OK, the last slot is necessarily OK.
+			final int end = length - count[ lastUsed ];
+			count[ lastUsed ] = 0;
+
+			// i moves through the start of each block
+			for( int i = 0, c = -1, d; i < end; i += count[ c ], count[ c ] = 0 ) {
+				for( int p = layers; p-- != 0; ) t[ p ] = a[ p ][ i + first ];
+				c = digit[ i ] & 0xFF;
+				
+				while( ( d = --pos[ c ] ) > i ) {
+					for( int p = layers; p-- != 0; ) {
+						final KEY_TYPE u = t[ p ];
+						t[ p ] = a[ p ][ d + first ];
+						a[ p ][ d + first ] = u;
+					}
+					final int zz = c;
+					c = digit[ d ] & 0xFF;
+					digit[ d ] = (byte)zz;
+				}
+
+				for( int p = layers; p-- != 0; ) a[ p ][ i + first ] = t[ p ];
+			}
+		}
+	}
+
+
+#endif
+
+#endif
+
+	/** Shuffles the specified array fragment using the specified pseudorandom number generator.
+	 * 
+	 * @param a the array to be shuffled.
+	 * @param from the index of the first element (inclusive) to be shuffled.
+	 * @param to the index of the last element (exclusive) to be shuffled.
+	 * @param random a pseudorandom number generator (please use a <a href="http://dsiutils.dsi.unimi.it/docs/it/unimi/dsi/util/XorShiftStarRandom.html">XorShift*</a> generator).
+	 * @return <code>a</code>.
+	 */
+	public static KEY_GENERIC KEY_GENERIC_TYPE[] shuffle( final KEY_GENERIC_TYPE[] a, final int from, final int to, final Random random ) {
+		for( int i = to - from; i-- != 0; ) {
+			final int p = random.nextInt( i + 1 ); 
+			final KEY_GENERIC_TYPE t = a[ from + i ];
+			a[ from + i ] = a[ from + p ];
+			a[ from + p ] = t;
+		}
+		return a;
+	}
+
+	/** Shuffles the specified array using the specified pseudorandom number generator.
+	 * 
+	 * @param a the array to be shuffled.
+	 * @param random a pseudorandom number generator (please use a <a href="http://dsiutils.dsi.unimi.it/docs/it/unimi/dsi/util/XorShiftStarRandom.html">XorShift*</a> generator).
+	 * @return <code>a</code>.
+	 */
+	public static KEY_GENERIC KEY_GENERIC_TYPE[] shuffle( final KEY_GENERIC_TYPE[] a, final Random random ) {
+		for( int i = a.length; i-- != 0; ) {
+			final int p = random.nextInt( i + 1 ); 
+			final KEY_GENERIC_TYPE t = a[ i ];
+			a[ i ] = a[ p ];
+			a[ p ] = t;
+		}
+		return a;
+	}
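+
+	/* A minimal shuffling sketch, assuming the int specialization produced from this template
+	 * (it.unimi.dsi.fastutil.ints.IntArrays); any java.util.Random works, though the documentation
+	 * above suggests an XorShift* generator:
+	 *
+	 *   int[] a = { 0, 1, 2, 3, 4 };
+	 *   IntArrays.shuffle( a, new java.util.Random() );
+	 *   // a now contains the same five elements in a pseudorandom order.
+	 */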
+
+	/** Reverses the order of the elements in the specified array.
+	 * 
+	 * @param a the array to be reversed.
+	 * @return <code>a</code>.
+	 */
+	public static KEY_GENERIC KEY_GENERIC_TYPE[] reverse( final KEY_GENERIC_TYPE[] a ) {
+		final int length = a.length;
+		for( int i = length / 2; i-- != 0; ) {
+			final KEY_GENERIC_TYPE t = a[ length - i - 1 ];
+			a[ length - i - 1 ] = a[ i ];
+			a[ i ] = t;
+		}
+		return a;
+	}
+
+	/** A type-specific content-based hash strategy for arrays. */
+
+	private static final class ArrayHashStrategy KEY_GENERIC implements Hash.Strategy<KEY_GENERIC_TYPE[]>, java.io.Serializable {
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		public int hashCode( final KEY_GENERIC_TYPE[] o ) {
+			return java.util.Arrays.hashCode( o );
+		}
+		
+		public boolean equals( final KEY_GENERIC_TYPE[] a, final KEY_GENERIC_TYPE[] b ) {
+			return ARRAYS.equals( a, b );
+		}
+	}
+
+	/** A type-specific content-based hash strategy for arrays.
+	 *
+	 * <P>This hash strategy may be used in custom hash collections whenever keys are
+	 * arrays, and they must be considered equal by content. This strategy
+	 * will handle <code>null</code> correctly, and it is serializable.
+	 */
+
+#if #keys(primitive)
+	public final static Hash.Strategy<KEY_TYPE[]> HASH_STRATEGY = new ArrayHashStrategy();
+#else
+	@SuppressWarnings({"rawtypes"})
+	public final static Hash.Strategy HASH_STRATEGY = new ArrayHashStrategy();
+#endif
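+
+	/* A minimal usage sketch for the hash strategy, assuming the int specialization produced from
+	 * this template (it.unimi.dsi.fastutil.ints.IntArrays) together with the custom hash set from
+	 * it.unimi.dsi.fastutil.objects:
+	 *
+	 *   ObjectOpenCustomHashSet<int[]> set = new ObjectOpenCustomHashSet<int[]>( IntArrays.HASH_STRATEGY );
+	 *   set.add( new int[] { 1, 2 } );
+	 *   set.contains( new int[] { 1, 2 } );   // true: arrays are compared by content, not by reference.
+	 */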
+
+}
diff --git a/drv/BidirectionalIterator.drv b/drv/BidirectionalIterator.drv
new file mode 100644
index 0000000..8cd2082
--- /dev/null
+++ b/drv/BidirectionalIterator.drv
@@ -0,0 +1,62 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.BidirectionalIterator;
+#if #keys(primitive)
+import it.unimi.dsi.fastutil.objects.ObjectBidirectionalIterator;
+#endif
+
+/** A type-specific bidirectional iterator; provides an additional method to avoid (un)boxing,
+ * and the possibility to skip elements backwards.
+ *
+ * @see BidirectionalIterator
+ */
+
+#if #keys(primitive)
+public interface KEY_BIDI_ITERATOR KEY_GENERIC extends KEY_ITERATOR KEY_GENERIC, ObjectBidirectionalIterator<KEY_GENERIC_CLASS> {
+#else
+public interface KEY_BIDI_ITERATOR KEY_GENERIC extends KEY_ITERATOR KEY_GENERIC, BidirectionalIterator<KEY_GENERIC_CLASS> {
+#endif
+
+#if #keys(primitive)
+
+	/**
+	 * Returns the previous element as a primitive type.
+	 *
+	 * @return the previous element in the iteration.
+	 * @see java.util.ListIterator#previous()
+	 */
+
+	KEY_TYPE PREV_KEY();
+
+#endif
+
+	/** Moves back by the given number of elements.
+	 *
+	 * <P>The effect of this call is exactly the same as that of
+	 * calling {@link #previous()} <code>n</code> times (possibly stopping
+	 * if {@link #hasPrevious()} becomes false).
+	 *
+	 * @param n the number of elements to skip back.
+	 * @return the number of elements actually skipped.
+	 * @see java.util.Iterator#next()
+	 */
+
+	int back( int n );
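+
+	/* A minimal sketch of back(), assuming the int specializations produced from these templates
+	 * (it.unimi.dsi.fastutil.ints.IntBidirectionalIterator, IntArrayList):
+	 *
+	 *   IntBidirectionalIterator i = IntArrayList.wrap( new int[] { 1, 2, 3, 4 } ).listIterator();
+	 *   i.skip( 4 );    // consume 1, 2, 3, 4
+	 *   i.back( 2 );    // equivalent to calling previousInt() twice
+	 *   i.nextInt();    // returns 3
+	 */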
+}
diff --git a/drv/BigArrayBigList.drv b/drv/BigArrayBigList.drv
new file mode 100644
index 0000000..ef36cc9
--- /dev/null
+++ b/drv/BigArrayBigList.drv
@@ -0,0 +1,1168 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.Iterator;
+import java.util.RandomAccess;
+import java.util.NoSuchElementException;
+
+import it.unimi.dsi.fastutil.Arrays;
+import it.unimi.dsi.fastutil.BigArrays;
+
+#if #keys(primitive)
+
+/** A type-specific big list based on a big array; provides some additional methods that use polymorphism to avoid (un)boxing. 
+ *
+ * <P>This class implements a lightweight, fast, open, optimized,
+ * reuse-oriented version of big-array-based big lists. Instances of this class
+ * represent a big list with a big array that is enlarged as needed when new entries
+ * are created (by doubling the current length), but is
+ * <em>never</em> made smaller (even on a {@link #clear()}). A family of
+ * {@linkplain #trim() trimming methods} lets you control the size of the
+ * backing big array; this is particularly useful if you reuse instances of this class.
+ * Range checks are equivalent to those of {@link java.util}'s classes, but
+ * they are delayed as much as possible. The backing big array is exposed by the
+ * {@link #elements()} method.
+ *
+ * <p>This class implements the bulk methods <code>removeElements()</code>,
+ * <code>addElements()</code> and <code>getElements()</code> using
+ * high-performance system calls (e.g., {@link
+ * System#arraycopy(Object,int,Object,int,int) System.arraycopy()} instead of
+ * expensive loops.
+ *
+ * @see java.util.ArrayList
+ */
+
+public class BIG_ARRAY_BIG_LIST KEY_GENERIC extends ABSTRACT_BIG_LIST KEY_GENERIC implements RandomAccess, Cloneable, java.io.Serializable {
+	private static final long serialVersionUID = -7046029254386353130L;
+
+
+#else
+
+/** A type-specific big-array-based big list; provides some additional methods that use polymorphism to avoid (un)boxing. 
+ *
+ * <P>This class implements a lightweight, fast, open, optimized,
+ * reuse-oriented version of big-array-based big lists. Instances of this class
+ * represent a big list with a big array that is enlarged as needed when new entries
+ * are created (by doubling the current length), but is
+ * <em>never</em> made smaller (even on a {@link #clear()}). A family of
+ * {@linkplain #trim() trimming methods} lets you control the size of the
+ * backing big array; this is particularly useful if you reuse instances of this class.
+ * Range checks are equivalent to those of {@link java.util}'s classes, but
+ * they are delayed as much as possible. 
+ *
+ * <p>The backing big array is exposed by the {@link #elements()} method. If an instance
+ * of this class was created {@linkplain #wrap(Object[][],long) by wrapping}, 
+ * backing-array reallocations will be performed using reflection, so that
+ * {@link #elements()} can return a big array of the same type of the original big array; the comments
+ * about efficiency made in {@link it.unimi.dsi.fastutil.objects.ObjectArrays} apply here.
+ *
+ * <p>This class implements the bulk methods <code>removeElements()</code>,
+ * <code>addElements()</code> and <code>getElements()</code> using
+ * high-performance system calls (e.g., {@link
+ * System#arraycopy(Object,int,Object,int,int) System.arraycopy()} instead of
+ * expensive loops.
+ *
+ * @see java.util.ArrayList
+ */
+
+public class BIG_ARRAY_BIG_LIST KEY_GENERIC extends ABSTRACT_BIG_LIST KEY_GENERIC implements RandomAccess, Cloneable, java.io.Serializable {
+	private static final long serialVersionUID = -7046029254386353131L;
+
+
+#endif
+
+	/** The initial default capacity of a big-array big list. */
+	public final static int DEFAULT_INITIAL_CAPACITY = 16;
+
+#if ! #keys(primitive)
+	/** Whether the backing big array was passed to <code>wrap()</code>. In
+	 * this case, we must reallocate with the same type of big array. */
+	protected final boolean wrapped;
+#endif
+
+	/** The backing big array. */
+	protected transient KEY_GENERIC_TYPE a[][];
+
+	/** The current actual size of the big list (never greater than the backing-array length). */
+	protected long size;
+
+	private static final boolean ASSERTS = ASSERTS_VALUE;
+
+	/** Creates a new big-array big list using a given big array.
+	 *
+	 * <P>This constructor is only meant to be used by the wrapping methods.
+	 *
+	 * @param a the big array that will be used to back this big-array big list.
+	 */
+
+	@SuppressWarnings("unused")
+	protected BIG_ARRAY_BIG_LIST( final KEY_GENERIC_TYPE a[][], boolean dummy ) {
+		this.a = a;
+#if ! #keys(primitive)
+		this.wrapped = true;
+#endif
+	}
+
+	/** Creates a new big-array big list with given capacity.
+	 *
+	 * @param capacity the initial capacity of the array list (may be 0).
+	 */
+
+	@SuppressWarnings("unchecked")
+	public BIG_ARRAY_BIG_LIST( final long capacity ) {
+		if ( capacity < 0 ) throw new IllegalArgumentException( "Initial capacity (" + capacity + ") is negative" );
+
+		a = KEY_GENERIC_BIG_ARRAY_CAST BIG_ARRAYS.newBigArray( capacity );
+#if ! #keys(primitive)
+		wrapped = false;
+#endif
+	}
+
+	/** Creates a new big-array big list with {@link #DEFAULT_INITIAL_CAPACITY} capacity.
+	 */
+	 
+	public BIG_ARRAY_BIG_LIST() {
+		this( DEFAULT_INITIAL_CAPACITY );
+	}
+
+	/** Creates a new big-array big list and fills it with a given type-specific collection.
+	 *
+	 * @param c a type-specific collection that will be used to fill the array list.
+	 */
+	 
+	public BIG_ARRAY_BIG_LIST( final COLLECTION KEY_EXTENDS_GENERIC c ) {
+		this( c.size() );
+		for( KEY_ITERATOR KEY_EXTENDS_GENERIC i = c.iterator(); i.hasNext(); ) add( i.NEXT_KEY() );
+	}
+
+	/** Creates a new big-array big list and fills it with a given type-specific list.
+	 *
+	 * @param l a type-specific list that will be used to fill the array list.
+	 */
+	 
+	public BIG_ARRAY_BIG_LIST( final BIG_LIST KEY_EXTENDS_GENERIC l ) {
+		this( l.size64() );
+		l.getElements( 0, a, 0, size = l.size64() );
+	}
+
+	/** Creates a new big-array big list and fills it with the elements of a given big array.
+	 *
+	 * <p>Note that this constructor makes it easy to build big lists from literal arrays
+	 * declared as <code><var>type</var>[][] {{ <var>init_values</var> }}</code>.
+	 * The only constraint is that the number of initialisation values is
+	 * below {@link it.unimi.dsi.fastutil.BigArrays#SEGMENT_SIZE}.
+	 *
+	 * @param a a big array whose elements will be used to fill the array list.
+	 */
+	 
+	public BIG_ARRAY_BIG_LIST( final KEY_GENERIC_TYPE a[][] ) {
+		this( a, 0, BIG_ARRAYS.length( a ) );
+	}
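+
+	/* A minimal literal-array sketch, assuming the int specialization produced from this template
+	 * (it.unimi.dsi.fastutil.ints.IntBigArrayBigList):
+	 *
+	 *   IntBigArrayBigList l = new IntBigArrayBigList( new int[][] { { 1, 2, 3 } } );
+	 *   // l.size64() == 3
+	 */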
+
+	/** Creates a new big-array big list and fills it with the elements of a given big array.
+	 *
+	 * <p>Note that this constructor makes it easy to build big lists from literal arrays
+	 * declared as <code><var>type</var>[][] {{ <var>init_values</var> }}</code>.
+	 * The only constraint is that the number of initialisation values is
+	 * below {@link it.unimi.dsi.fastutil.BigArrays#SEGMENT_SIZE}.
+	 *
+	 * @param a a big array whose elements will be used to fill the array list.
+	 * @param offset the first element to use.
+	 * @param length the number of elements to use.
+	 */
+	 
+	public BIG_ARRAY_BIG_LIST( final KEY_GENERIC_TYPE a[][], final long offset, final long length ) {
+		this( length );
+		BIG_ARRAYS.copy( a, offset, this.a, 0, length );
+		size = length;
+	}
+
+	/** Creates a new big-array big list and fills it with the elements returned by an iterator.
+	 *
+	 * @param i an iterator whose returned elements will fill the array list.
+	 */
+	 
+	public BIG_ARRAY_BIG_LIST( final Iterator<? extends KEY_GENERIC_CLASS> i ) {
+		this();
+		while( i.hasNext() ) this.add( i.next() );
+	}
+
+	/** Creates a new big-array big list and fills it with the elements returned by a type-specific iterator.
+	 *
+	 * @param i a type-specific iterator whose returned elements will fill the array list.
+	 */
+	 
+	public BIG_ARRAY_BIG_LIST( final KEY_ITERATOR KEY_EXTENDS_GENERIC i ) {
+		this();
+		while( i.hasNext() ) this.add( i.NEXT_KEY() );
+	}
+
+#if #keys(primitive)
+	/** Returns the backing big array of this big list.
+	 *
+	 * @return the backing big array.
+	 */
+
+	public KEY_GENERIC_TYPE[][] elements() {
+		return a;
+	}
+#else
+	/** Returns the backing big array of this big list.
+	 *
+	 * <P>If this big-array big list was created by wrapping a given big array, it is guaranteed
+	 * that the type of the returned big array will be the same. Otherwise, the returned
+	 * big array will be a big array of objects.
+	 *
+	 * @return the backing big array.
+	 */
+
+	public KEY_GENERIC_TYPE[][] elements() {
+		return a;
+	}
+#endif
+
+	/** Wraps a given big array into a big-array list of given size.
+	 *
+	 * @param a a big array to wrap.
+	 * @param length the length of the resulting big-array list.
+	 * @return a new big-array list of the given size, wrapping the given big array.
+	 */
+
+	public static KEY_GENERIC BIG_ARRAY_BIG_LIST KEY_GENERIC wrap( final KEY_GENERIC_TYPE a[][], final long length ) {
+		if ( length > BIG_ARRAYS.length( a ) ) throw new IllegalArgumentException( "The specified length (" + length + ") is greater than the array size (" + BIG_ARRAYS.length( a ) + ")" );
+		final BIG_ARRAY_BIG_LIST KEY_GENERIC l = new BIG_ARRAY_BIG_LIST KEY_GENERIC( a, false );
+		l.size = length;
+		return l;
+	}
+
+	/** Wraps a given big array into a big-array big list.
+	 *
+	 * @param a a big array to wrap.
+	 * @return a new big-array big list wrapping the given array.
+	 */
+
+	public static KEY_GENERIC BIG_ARRAY_BIG_LIST KEY_GENERIC wrap( final KEY_GENERIC_TYPE a[][] ) {
+		return wrap( a, BIG_ARRAYS.length( a ) );
+	}
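+
+	/* A minimal wrapping sketch, assuming the int specialization produced from this template
+	 * (it.unimi.dsi.fastutil.ints.IntBigArrayBigList) and the companion big-array class
+	 * it.unimi.dsi.fastutil.ints.IntBigArrays:
+	 *
+	 *   int[][] a = IntBigArrays.newBigArray( 100 );
+	 *   IntBigArrayBigList l = IntBigArrayBigList.wrap( a, 10 );
+	 *   // l.size64() == 10, and l is backed directly by a (no copy is made).
+	 */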
+
+
+	/** Ensures that this big-array big list can contain the given number of entries without resizing.
+	 *
+	 * @param capacity the new minimum capacity for this big-array big list.
+	 */
+	@SuppressWarnings("unchecked")
+	public void ensureCapacity( final long capacity ) {
+#if #keys(primitive)
+		a = BIG_ARRAYS.ensureCapacity( a, capacity, size );
+#else
+		if ( wrapped ) a = BIG_ARRAYS.ensureCapacity( a, capacity, size );
+		else {
+			if ( capacity > BIG_ARRAYS.length( a ) ) {
+				final Object t[][] = BIG_ARRAYS.newBigArray( capacity );
+				BIG_ARRAYS.copy( a, 0, t, 0, size );
+				a = (KEY_GENERIC_TYPE[][])t;
+			}
+		}
+#endif
+		if ( ASSERTS ) assert size <= BIG_ARRAYS.length( a );
+	}
+
+	/** Grows this big-array big list, ensuring that it can contain the given number of entries without resizing,
+	 * and in case enlarging it at least by a factor of two.
+	 *
+	 * @param capacity the new minimum capacity for this big-array big list.
+	 */
+	@SuppressWarnings("unchecked")
+	private void grow( final long capacity ) {
+#if #keys(primitive)
+		a = BIG_ARRAYS.grow( a, capacity, size );
+#else
+		if ( wrapped ) a =  BIG_ARRAYS.grow( a, capacity, size );
+		else {
+			if ( capacity > BIG_ARRAYS.length( a ) ) {
+				final int newLength = (int)Math.min( Math.max( 2 * BIG_ARRAYS.length( a ), capacity ), Arrays.MAX_ARRAY_SIZE );
+				final Object t[][] = BIG_ARRAYS.newBigArray( newLength );
+				BIG_ARRAYS.copy( a, 0, t, 0, size );
+				a = (KEY_GENERIC_TYPE[][])t;
+			}			
+		}
+#endif
+		if ( ASSERTS ) assert size <= BIG_ARRAYS.length( a );
+	}
+
+	public void add( final long index, final KEY_GENERIC_TYPE k ) {
+		ensureIndex( index );
+		grow( size + 1 );
+		if ( index != size ) BIG_ARRAYS.copy( a, index, a, index + 1, size - index );
+		BIG_ARRAYS.set( a, index, k );
+		size++;
+		if ( ASSERTS ) assert size <= BIG_ARRAYS.length( a );
+	}
+
+	public boolean add( final KEY_GENERIC_TYPE k ) {
+		grow( size + 1 );
+		BIG_ARRAYS.set( a, size++, k );
+		if ( ASSERTS ) assert size <= BIG_ARRAYS.length( a );
+		return true;
+	}
+
+	public KEY_GENERIC_TYPE GET_KEY( final long index ) {
+		if ( index >= size ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than or equal to list size (" + size + ")" );
+		return BIG_ARRAYS.get( a, index );
+	}
+
+	public long indexOf( final KEY_TYPE k ) {
+		for( long i = 0; i < size; i++ ) if ( KEY_EQUALS( k, BIG_ARRAYS.get( a, i ) ) ) return i;
+		return -1;
+	}
+
+
+	public long lastIndexOf( final KEY_TYPE k ) {
+		for( long i = size; i-- != 0; ) if ( KEY_EQUALS( k, BIG_ARRAYS.get( a, i ) ) ) return i;
+		return -1;
+	}
+
+	public KEY_GENERIC_TYPE REMOVE_KEY( final long index ) {
+		if ( index >= size ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than or equal to list size (" + size + ")" );
+		final KEY_GENERIC_TYPE old = BIG_ARRAYS.get( a, index );
+		size--;
+		if ( index != size ) BIG_ARRAYS.copy( a, index + 1, a, index, size - index );
+#if #keys(reference)
+		BIG_ARRAYS.set( a, size, null );
+#endif
+		if ( ASSERTS ) assert size <= BIG_ARRAYS.length( a );
+		return old;
+	}
+
+	public boolean rem( final KEY_TYPE k ) {
+		final long index = indexOf( k );
+		if ( index == -1 ) return false;
+		REMOVE_KEY( index );
+		if ( ASSERTS ) assert size <= BIG_ARRAYS.length( a );
+		return true;
+	}
+
+#if #keys(reference)
+	public boolean remove( final Object o ) {
+		return rem( o );
+	}
+#endif
+
+	public KEY_GENERIC_TYPE set( final long index, final KEY_GENERIC_TYPE k ) {
+		if ( index >= size ) throw new IndexOutOfBoundsException( "Index (" + index + ") is greater than or equal to list size (" + size + ")" );
+		KEY_GENERIC_TYPE old = BIG_ARRAYS.get( a, index );
+		BIG_ARRAYS.set( a, index, k );
+		return old;
+	}
+
+	public void clear() {
+#if #keys(reference)
+		BIG_ARRAYS.fill( a, 0, size, null );
+#endif
+		size = 0;
+		if ( ASSERTS ) assert size <= BIG_ARRAYS.length( a );
+	}
+
+	public long size64() {
+		return size;
+	}		
+
+	public void size( final long size ) {
+		if ( size > BIG_ARRAYS.length( a ) ) ensureCapacity( size );
+		if ( size > this.size ) BIG_ARRAYS.fill( a, this.size, size, KEY_NULL );
+#if #keys(reference)
+		else BIG_ARRAYS.fill( a, size, this.size, KEY_NULL );
+#endif
+		this.size = size;
+	}		
+
+	public boolean isEmpty() {
+		return size == 0;
+	}		
+
+	/** Trims this big-array big list so that the capacity is equal to the size. 
+	 *
+	 * @see java.util.ArrayList#trimToSize()
+	 */
+	public void trim() {
+		trim( 0 );
+	}
+
+	/** Trims the backing big array if it is too large.
+	 * 
+	 * If the current big array length is smaller than or equal to
+	 * <code>n</code>, this method does nothing. Otherwise, it trims the
+	 * big-array length to the maximum between <code>n</code> and {@link #size64()}.
+	 *
+	 * <P>This method is useful when reusing big lists.  {@linkplain #clear() Clearing a
+	 * big list} leaves the big-array length untouched. If you are reusing a big list
+	 * many times, you can call this method with a typical
+	 * size to avoid keeping around a very large big array just
+	 * because of a few large transient big lists.
+	 *
+	 * @param n the threshold for the trimming.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public void trim( final long n ) {
+		final long arrayLength = BIG_ARRAYS.length( a );
+		if ( n >= arrayLength || size == arrayLength ) return;
+		a = BIG_ARRAYS.trim( a, Math.max( n, size ) );
+		if ( ASSERTS ) assert size <= BIG_ARRAYS.length( a );
+	}
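+
+	/* A minimal reuse sketch, assuming the int specialization produced from this template
+	 * (it.unimi.dsi.fastutil.ints.IntBigArrayBigList): clearing never shrinks the backing big
+	 * array, so trim() is what actually releases memory when an instance is kept for reuse:
+	 *
+	 *   IntBigArrayBigList l = new IntBigArrayBigList( 1000000 );
+	 *   // ... fill and use l ...
+	 *   l.clear();
+	 *   l.trim( 16 );   // backing big array shrinks to max( 16, l.size64() ) entries
+	 */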
+
+
+   	/** Copies elements of this type-specific list into the given big array using optimized system calls.
+	 *
+	 * @param from the start index (inclusive).
+	 * @param a the destination big array.
+	 * @param offset the offset into the destination array where to store the first element copied.
+	 * @param length the number of elements to be copied.
+	 */
+
+	public void getElements( final int from, final KEY_TYPE[][] a, final long offset, final long length ) {
+		BIG_ARRAYS.copy( this.a, from, a, offset, length );
+	}
+
+	/** Removes elements of this type-specific list using optimized system calls.
+	 *
+	 * @param from the start index (inclusive).
+	 * @param to the end index (exclusive).
+	 */
+	public void removeElements( final int from, final int to ) {
+		BigArrays.ensureFromTo( size, from, to );
+		BIG_ARRAYS.copy( a, to, a, from, size - to );
+		size -= ( to - from );
+#if #keys(reference)
+		BIG_ARRAYS.fill( a, size, size + to - from, null );
+#endif
+	}
+	
+
+	/** Adds elements to this type-specific list using optimized system calls.
+	 *
+	 * @param index the index at which to add elements.
+	 * @param a the big array containing the elements.
+	 * @param offset the offset of the first element to add.
+	 * @param length the number of elements to add.
+	 */
+	public void addElements( final int index, final KEY_GENERIC_TYPE a[][], final long offset, final long length ) {
+		ensureIndex( index );
+		BIG_ARRAYS.ensureOffsetLength( a, offset, length );
+		grow( size + length );
+		BIG_ARRAYS.copy( this.a, index, this.a, index + length, size - index );
+		BIG_ARRAYS.copy( a, offset, this.a, index, length );
+		size += length;
+	}
+
+	public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( final int index ) {
+		ensureIndex( index );
+
+		return new KEY_ABSTRACT_BIG_LIST_ITERATOR KEY_GENERIC() {
+				int pos = index, last = -1;
+
+				public boolean hasNext() { return pos < size; }
+				public boolean hasPrevious() { return pos > 0; }
+				public KEY_GENERIC_TYPE NEXT_KEY() { if ( ! hasNext() ) throw new NoSuchElementException(); return BIG_ARRAYS.get( a, last = pos++ ); }
+				public KEY_GENERIC_TYPE PREV_KEY() { if ( ! hasPrevious() ) throw new NoSuchElementException(); return BIG_ARRAYS.get( a, last = --pos ); }
+				public long nextIndex() { return pos; }
+				public long previousIndex() { return pos - 1; }
+				public void add( KEY_GENERIC_TYPE k ) { 
+					if ( last == -1 ) throw new IllegalStateException();
+					BIG_ARRAY_BIG_LIST.this.add( pos++, k ); 
+					last = -1;
+				}
+				public void set( KEY_GENERIC_TYPE k ) { 
+					if ( last == -1 ) throw new IllegalStateException();
+					BIG_ARRAY_BIG_LIST.this.set( last, k );
+				}
+				public void remove() { 
+					if ( last == -1 ) throw new IllegalStateException();
+					BIG_ARRAY_BIG_LIST.this.REMOVE_KEY( last ); 
+					/* If the last operation was a next(), we are removing an element *before* us, and we must decrease pos correspondingly. */
+					if ( last < pos ) pos--;
+					last = -1;
+				}
+			};
+	}
+
+
+	@SuppressWarnings("unchecked")
+	public BIG_ARRAY_BIG_LIST KEY_GENERIC clone() {
+		BIG_ARRAY_BIG_LIST KEY_GENERIC c = new BIG_ARRAY_BIG_LIST KEY_GENERIC( size );
+		BIG_ARRAYS.copy( a, 0, c.a, 0, size );
+		c.size = size;
+		return c;
+	}
+
+#if #keyclass(Object)
+	private boolean valEquals( final K a, final K b ) {
+		return a == null ? b == null : a.equals( b );
+	}
+#endif
+
+    /** Compares this type-specific big-array list to another one.
+	 *
+	 * <P>This method exists only for the sake of efficiency. The implementation
+	 * inherited from the abstract implementation would already work.
+	 *
+	 * @param l a type-specific big-array list.
+     * @return true if the argument contains the same elements as this type-specific big-array list.
+	 */
+	public boolean equals( final BIG_ARRAY_BIG_LIST KEY_GENERIC l ) {
+		if ( l == this ) return true;
+		long s = size64();
+		if ( s != l.size64() ) return false;
+		final KEY_GENERIC_TYPE[][] a1 = a;
+		final KEY_GENERIC_TYPE[][] a2 = l.a;
+
+#if #keyclass(Object)
+		while( s-- !=  0 ) if ( ! valEquals( BIG_ARRAYS.get( a1, s ), BIG_ARRAYS.get( a2, s ) ) ) return false;
+#else
+		while( s-- !=  0 ) if ( BIG_ARRAYS.get( a1, s ) != BIG_ARRAYS.get( a2, s ) ) return false;
+#endif
+		return true;
+	}
+
+
+#if ! #keyclass(Reference)
+
+    /** Compares this big list to another big list.
+     *
+	 * <P>This method exists only for the sake of efficiency. The implementation
+	 * inherited from the abstract implementation would already work.
+	 *
+     * @param l a big list.
+     * @return a negative integer,
+     * zero, or a positive integer as this big list is lexicographically less than, equal
+     * to, or greater than the argument.
+     */
+	@SuppressWarnings("unchecked")
+	public int compareTo( final BIG_ARRAY_BIG_LIST KEY_EXTENDS_GENERIC l ) {
+		final long s1 = size64(), s2 = l.size64();
+		final KEY_GENERIC_TYPE a1[][] = a, a2[][] = l.a;
+		KEY_GENERIC_TYPE e1, e2;
+		int r, i;
+		
+		for( i = 0; i < s1 && i < s2; i++ ) {
+			e1 = BIG_ARRAYS.get( a1, i );
+			e2 = BIG_ARRAYS.get( a2, i );
+			if ( ( r = KEY_CMP( e1, e2 ) ) != 0 ) return r;
+		}
+
+		return i < s2 ? -1 : ( i < s1 ? 1 : 0 );
+	}
+#endif
+
+
+	private void writeObject( java.io.ObjectOutputStream s ) throws java.io.IOException {
+		s.defaultWriteObject();
+		for( int i = 0; i < size; i++ ) s.WRITE_KEY( BIG_ARRAYS.get( a, i ) );
+	}
+
+	@SuppressWarnings("unchecked")
+	private void readObject( java.io.ObjectInputStream s ) throws java.io.IOException, ClassNotFoundException {
+		s.defaultReadObject();
+		a = KEY_GENERIC_BIG_ARRAY_CAST BIG_ARRAYS.newBigArray( size );
+		for( int i = 0; i < size; i++ ) BIG_ARRAYS.set( a, i, KEY_GENERIC_CAST s.READ_KEY() );
+	}
+
+
+#ifdef TEST
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif #keyclass(Object)
+		return Integer.toBinaryString( r.nextInt() );
+#else 
+		return new java.io.Serializable() {};
+#endif
+	}
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition p = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, p ).toString();
+	}
+
+	private static void speedTest( int n, boolean comp ) {
+		System.out.println( "There are presently no speed tests for this class." );
+	}
+
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	private static Object[] k, v, nk;
+	private static KEY_TYPE kt[];
+	private static KEY_TYPE nkt[];
+	private static BIG_ARRAY_BIG_LIST topList;
+
+	protected static void testLists( BIG_LIST m, BIG_LIST t, int n, int level ) {
+		long ms;
+		Exception mThrowsIllegal, tThrowsIllegal, mThrowsOutOfBounds, tThrowsOutOfBounds;
+		Object rt = null;
+		KEY_TYPE rm = KEY_NULL;
+
+		if ( level > 4 ) return;
+				
+
+		/* Now we check that both lists agree on random keys. For m we use the polymorphic method. */
+
+		for( int i = 0; i < n; i++ ) {
+			int p = r.nextInt() % ( n * 2 );
+
+			KEY_TYPE T = genKey();
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+			
+			try {
+				m.set( p, T );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			try {
+				t.set( p, KEY2OBJ( T ) );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): set() divergence at start in IndexOutOfBoundsException for index " + p + "  (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( t.get( p ).equals( KEY2OBJ( m.GET_KEY( p ) ) ), "Error (" + level + ", " + seed + "): m and t differ after set() on position " + p + " (" + m.GET_KEY( p ) + ", " + t.get( p ) + ")" );
+
+			p = r.nextInt() % ( n * 2 );
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+			
+			try {
+				m.GET_KEY( p );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			try {
+				t.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): get() divergence at start in IndexOutOfBoundsException for index " + p + "  (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( t.get( p ).equals( KEY2OBJ( m.GET_KEY( p ) ) ), "Error (" + level + ", " + seed + "): m and t differ after get() on position " + p + " (" + m.GET_KEY( p ) + ", " + t.get( p ) + ")" );
+			
+		}
+		
+		/* Now we check that both lists agree on random keys. For m we use the standard method. */
+
+		for( int i = 0; i < n; i++ ) {
+			int p = r.nextInt() % ( n * 2 );
+
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+			
+			try {
+				m.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			try {
+				t.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): get() divergence at start in IndexOutOfBoundsException for index " + p + "  (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( t.get( p ).equals( m.get( p ) ), "Error (" + level + ", " + seed + "): m and t differ at start on position " + p + " (" + m.get( p ) + ", " + t.get( p ) + ")" );
+			
+		}
+		
+		/* Now we check that m and t are equal. */
+		if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t);
+		
+		ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" );
+		ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" );
+
+			
+
+		/* Now we check that m actually holds that data. */
+		for(Iterator i=t.iterator(); i.hasNext();  ) {
+			ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on t)" );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for(Iterator i=m.listIterator(); i.hasNext();  ) {
+			ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on m)" );
+		}
+
+		/* Now we check that inquiries about random data give the same answer in m and t. For
+		   m we use the polymorphic method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+			ensure( m.contains(T) == t.contains(KEY2OBJ(T)), "Error (" + level + ", " + seed + "): divergence in content between t and m (polymorphic method)" );
+		}
+
+		/* Again, we check that inquiries about random data give the same answer in m and t, but
+		   for m we use the standard method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+			ensure( m.contains(KEY2OBJ(T)) == t.contains(KEY2OBJ(T)), "Error (" + level + ", " + seed + "): divergence in content between t and m (standard method)" );
+		}
+
+		/* Now we add and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<2*n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			try {
+				m.add( T );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+
+			try {
+				t.add( KEY2OBJ( T ) );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+
+			T = genKey();
+			int p = r.nextInt() % ( 2 * n + 1 );
+
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+
+			try {
+				m.add(p, T );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+
+			try {
+				t.add(p, KEY2OBJ(T));
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): add() divergence in IndexOutOfBoundsException for index " + p + " for " + T + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+
+			p = r.nextInt() % ( 2 * n + 1 );
+
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+
+			try {
+				rm = m.REMOVE_KEY(p);
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+
+			try {
+				rt = t.remove(p);
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): remove() divergence in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( rt.equals( KEY2OBJ( rm ) ), "Error (" + level + ", " + seed + "): divergence in remove() between t and m (" + rt + ", " + rm + ")" );
+		}
+
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after add/remove" );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after add/remove" );
+
+		/* Now we add random data in m and t using addAll on a collection, checking that the result is the same. */
+
+		for(int i=0; i<n;  i++ ) {
+			int p = r.nextInt() % ( 2 * n + 1 );
+			java.util.Collection m1 = new java.util.ArrayList();
+			int s = r.nextInt( n / 2 + 1 );
+			for( int j = 0; j < s; j++ ) m1.add( KEY2OBJ( genKey() ) );
+
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+
+			try {
+				m.addAll(p, m1);
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+
+			try {
+				t.addAll(p, m1);
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): addAll() divergence in IndexOutOfBoundsException for index " + p + " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + m + t + "): ! m.equals( t ) after addAll" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + m + t + "): ! t.equals( m ) after addAll" );
+		}
+
+		if ( m.size64() > n ) {
+			m.size( n );
+			while( t.size64() != n ) t.remove( t.size64() -1 );
+		}
+
+		/* Now we add random data in m and t using addAll on a type-specific collection, checking that the result is the same. */
+
+		for(int i=0; i<n;  i++ ) {
+			int p = r.nextInt() % ( 2 * n + 1 );
+			COLLECTION m1 = new BIG_ARRAY_BIG_LIST();
+			java.util.Collection t1 = new java.util.ArrayList();
+			int s = r.nextInt( n / 2 + 1 );
+			for( int j = 0; j < s; j++ ) {
+				KEY_TYPE x = genKey();
+				m1.add( x );
+				t1.add( KEY2OBJ( x ) );
+			}
+
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+
+			try {
+				m.addAll(p, m1);
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+
+			try {
+				t.addAll(p, t1);
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): polymorphic addAll() divergence in IndexOutOfBoundsException for index " + p + " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + m + t + "): ! m.equals( t ) after polymorphic addAll" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + m + t + "): ! t.equals( m ) after polymorphic addAll" );
+		}
+
+		if ( m.size64() > n ) {
+			m.size( n );
+			while( t.size64() != n ) t.remove( t.size64() -1 );
+		}
+
+		/* Now we add random data in m and t using addAll on a list, checking that the result is the same. */
+
+		for(int i=0; i<n;  i++ ) {
+			int p = r.nextInt() % ( 2 * n + 1 );
+			BIG_LIST m1 = new BIG_ARRAY_BIG_LIST();
+			java.util.Collection t1 = new java.util.ArrayList();
+			int s = r.nextInt( n / 2 + 1 );
+			for( int j = 0; j < s; j++ ) {
+				KEY_TYPE x = genKey();
+				m1.add( x );
+				t1.add( KEY2OBJ( x ) );
+			}
+
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+
+			try {
+				m.addAll(p, m1);
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+
+			try {
+				t.addAll(p, t1);
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): list addAll() divergence in IndexOutOfBoundsException for index " + p + " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after list addAll" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after list addAll" );
+		}
+
+		/* Now we check that both lists agree on random keys. For m we use the standard method. */
+
+		for( int i = 0; i < n; i++ ) {
+			int p = r.nextInt() % ( n * 2 );
+
+			mThrowsOutOfBounds = tThrowsOutOfBounds  = null;
+			
+			try {
+				m.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			try {
+				t.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + level + ", " + seed + "): get() divergence in IndexOutOfBoundsException for index " + p + "  (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( t.get( p ).equals( m.get( p ) ), "Error (" + level + ", " + seed + "): m and t differ on position " + p + " (" + m.get( p ) + ", " + t.get( p ) +")" );
+			
+		}
+
+		/* Now we inquire about the content with indexOf()/lastIndexOf(). */
+
+		for(int i=0; i<10*n;  i++ ) {
+			KEY_TYPE T = genKey();
+			ensure( m.indexOf( KEY2OBJ( T ) ) == t.indexOf( KEY2OBJ( T ) ),
+					"Error (" + level + ", " + seed + "): indexOf() divergence for " + T + "  (" + m.indexOf( KEY2OBJ( T ) ) + ", " + t.indexOf( KEY2OBJ( T ) ) + ")" );
+			ensure( m.lastIndexOf( KEY2OBJ( T ) ) == t.lastIndexOf( KEY2OBJ( T ) ),
+					"Error (" + level + ", " + seed + "): lastIndexOf() divergence for " + T + "  (" + m.lastIndexOf( KEY2OBJ( T ) ) + ", " + t.lastIndexOf( KEY2OBJ( T ) ) + ")" );
+			ensure( m.indexOf( T ) == t.indexOf( KEY2OBJ( T ) ),
+					"Error (" + level + ", " + seed + "): polymorphic indexOf() divergence for " + T + "  (" + m.indexOf( T ) + ", " + t.indexOf( KEY2OBJ( T ) ) + ")" );
+			ensure( m.lastIndexOf( T ) == t.lastIndexOf( KEY2OBJ( T ) ),
+					"Error (" + level + ", " + seed + "): polymorphic lastIndexOf() divergence for " + T + "  (" + m.lastIndexOf( T ) + ", " + t.lastIndexOf( KEY2OBJ( T ) ) + ")" );
+		}
+
+		/* Now we check cloning. */
+
+		if ( level == 0 ) {
+			ensure( m.equals( ((BIG_ARRAY_BIG_LIST)m).clone() ), "Error (" + level + ", " + seed + "): m does not equal m.clone()" );
+			ensure( ((BIG_ARRAY_BIG_LIST)m).clone().equals( m ), "Error (" + level + ", " + seed + "): m.clone() does not equal m" );
+		}
+
+		/* Now we play with constructors. */
+		ensure( m.equals( new BIG_ARRAY_BIG_LIST( (COLLECTION)m ) ), "Error (" + level + ", " + seed + "): m does not equal new ( type-specific Collection m )" );
+		ensure( ( new BIG_ARRAY_BIG_LIST( (COLLECTION)m ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( type-specific Collection m ) does not equal m" );
+		ensure( m.equals( new BIG_ARRAY_BIG_LIST( (BIG_LIST)m ) ), "Error (" + level + ", " + seed + "): m does not equal new ( type-specific List m )" );
+		ensure( ( new BIG_ARRAY_BIG_LIST( (BIG_LIST)m ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( type-specific List m ) does not equal m" );
+		ensure( m.equals( new BIG_ARRAY_BIG_LIST( m.listIterator() ) ), "Error (" + level + ", " + seed + "): m does not equal new ( m.listIterator() )" );
+		ensure( ( new BIG_ARRAY_BIG_LIST( m.listIterator() ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( m.listIterator() ) does not equal m" );
+		ensure( m.equals( new BIG_ARRAY_BIG_LIST( m.iterator() ) ), "Error (" + level + ", " + seed + "): m does not equal new ( m.type_specific_iterator() )" );
+		ensure( ( new BIG_ARRAY_BIG_LIST( m.iterator() ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( m.type_specific_iterator() ) does not equal m" );
+
+
+		int h = m.hashCode();
+
+		/* Now we save and read m. */
+
+		BIG_LIST m2 = null;
+		  
+		try {
+			java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test");
+			java.io.OutputStream os = new java.io.FileOutputStream(ff);
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os);
+				
+			oos.writeObject(m);
+			oos.close();
+				
+			java.io.InputStream is = new java.io.FileInputStream(ff);
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is);
+				
+			m2 = (BIG_LIST)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch(Exception e) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+
+#if ! #keyclass(Reference)
+		ensure( m2.hashCode() == h, "Error (" + level + ", " + seed + "): hashCode() changed after save/read" );
+
+		/* Now we check that m2 actually holds that data. */
+		  
+		ensure( m2.equals(t), "Error (" + level + ", " + seed + "): ! m2.equals( t ) after save/read" );
+		ensure( t.equals(m2), "Error (" + level + ", " + seed + "): ! t.equals( m2 ) after save/read" );
+		/* Now we take out of m everything, and check that it is empty. */
+
+		for(Iterator i=t.iterator(); i.hasNext(); ) m2.remove(i.next());
+
+		ensure( m2.isEmpty(), "Error (" + level + ", " + seed + "): m2 is not empty (as it should be)" );
+#endif		  
+				 
+		/* Now we play with iterators. */
+
+		{
+			KEY_BIG_LIST_ITERATOR i;
+			KEY_BIG_LIST_ITERATOR j;
+			Object J;
+			i = m.listIterator(); 
+			j = t.listIterator(); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext()" );
+				ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + level + ", " + seed + "): divergence in hasPrevious()" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					ensure( i.next().equals( J = j.next() ), "Error (" + level + ", " + seed + "): divergence in next()" );
+
+					if ( r.nextFloat() < 0.2 ) {
+						i.remove();
+						j.remove();
+					} 
+					else if ( r.nextFloat() < 0.2 ) {
+						KEY_TYPE T = genKey();
+						i.set( T );
+						j.set( KEY2OBJ( T ) );
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						KEY_TYPE T = genKey();
+						i.add( T );
+						j.add( KEY2OBJ( T ) );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					ensure( i.previous().equals( J = j.previous() ), "Error (" + level + ", " + seed + "): divergence in previous()" );
+
+					if ( r.nextFloat() < 0.2 ) {
+						i.remove();
+						j.remove();
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						KEY_TYPE T = genKey();
+						i.set( T );
+						j.set( KEY2OBJ( T ) );
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						KEY_TYPE T = genKey();
+						i.add( T );
+						j.add( KEY2OBJ( T ) );
+					}
+				}
+
+				ensure( i.nextIndex() == j.nextIndex(), "Error (" + level + ", " + seed + "): divergence in nextIndex()" );
+				ensure( i.previousIndex() == j.previousIndex(), "Error (" + level + ", " + seed + "): divergence in previousIndex()" );
+
+			}
+
+		}
+
+		{
+			Object previous = null;
+			Object I, J;
+			long from = m.isEmpty() ? 0 : ( r.nextLong() & 0x7FFFFFFFFFFFFFFFL) % m.size64();
+			KEY_BIG_LIST_ITERATOR i;
+			KEY_BIG_LIST_ITERATOR j;
+			i = m.listIterator( from ); 
+			j = t.listIterator( from ); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" );
+				ensure( i.hasPrevious() == j.hasPrevious() , "Error (" + level + ", " + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					ensure( ( I = i.next() ).equals( J = j.next() ), "Error (" + level + ", " + seed + "): divergence in next() (" + I + ", " + J + ", iterator with starting point " + from + ")" );
+					//System.err.println("Done next " + I + " " + J + "  " + badPrevious);
+
+					if ( r.nextFloat() < 0.2 ) {
+						//System.err.println("Removing in next");
+						i.remove();
+						j.remove();
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						KEY_TYPE T = genKey();
+						i.set( T );
+						j.set( KEY2OBJ( T ) );
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						KEY_TYPE T = genKey();
+						i.add( T );
+						j.add( KEY2OBJ( T ) );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					ensure( ( I = i.previous() ).equals( J = j.previous() ), "Error (" + level + ", " + seed + "): divergence in previous() (" + I + ", " + J + ", iterator with starting point " + from + ")" );
+
+					if ( r.nextFloat() < 0.2 ) {
+						//System.err.println("Removing in prev");
+						i.remove();
+						j.remove();
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						KEY_TYPE T = genKey();
+						i.set( T );
+						j.set( KEY2OBJ( T ) );
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						KEY_TYPE T = genKey();
+						i.add( T );
+						j.add( KEY2OBJ( T ) );
+					}
+				}
+			}
+
+		}
+
+		/* Now we check that m actually holds that data. */
+		  
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after iteration" );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after iteration" );
+
+		/* Now we select a pair of keys and create a subset. */
+
+		if ( ! m.isEmpty() ) {
+			long start = (r.nextLong() & 0x7FFFFFFFFFFFFFFFL) % m.size64();
+			long end = start + (r.nextLong() & 0x7FFFFFFFFFFFFFFFL) % ( m.size64() - start );
+			//System.err.println("Checking subList from " + start + " to " + end + " (level=" + (level+1) + ")..." );
+			testLists( m.subList( start, end ), t.subList( start, end ), n, level + 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + m + t + "): ! m.equals( t ) after subList" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after subList" );
+
+		}
+
+		m.clear();
+		t.clear();
+		ensure( m.isEmpty(), "Error (" + level + ", " + seed + "): m is not empty after clear()" );
+	}
+
+
+	protected static void test( int n ) {
+		BIG_ARRAY_BIG_LIST m = new BIG_ARRAY_BIG_LIST();
+		BIG_LIST t = BIG_LISTS.asBigList( new ARRAY_LIST() );
+		topList = m;
+		k = new Object[n];
+		nk = new Object[n];
+		kt = new KEY_TYPE[n];
+		nkt = new KEY_TYPE[n];
+
+		for( int i = 0; i < n; i++ ) {
+#if #keys(reference)
+			k[i] = kt[i] = genKey();
+			nk[i] = nkt[i] = genKey();
+#else
+			k[i] = new KEY_CLASS( kt[i] = genKey() );
+			nk[i] = new KEY_CLASS( nkt[i] = genKey() );
+#endif
+		}
+		  
+		/* We add pairs to t. */
+#if #keys(primitive)
+		for( int i = 0; i < n;  i++ ) t.add( (KEY_GENERIC_CLASS)k[i] );
+#else		  
+		for( int i = 0; i < n;  i++ ) t.add( k[i] );
+#endif
+
+		/* We add to m the same data */
+		m.addAll(t);
+
+		testLists( m, t, n, 0 );
+
+		System.out.println("Test OK");
+		return;
+	}
+
+
+	public static void main( String args[] ) {
+		int n  = Integer.parseInt(args[1]);
+		if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) );
+		  
+
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
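+	/* The harness above is compiled only when the TEST symbol is defined. A usage sketch
+	   (the class name assumes the int specialization generated from this template):
+
+	       java it.unimi.dsi.fastutil.ints.IntBigArrayBigList test 1000 42
+
+	   args[ 0 ] selects "test", "speedTest" or "speedComp", args[ 1 ] is the size n, and the
+	   optional args[ 2 ] fixes the random seed. */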
+
+#endif
+
+}
diff --git a/drv/BigArrays.drv b/drv/BigArrays.drv
new file mode 100644
index 0000000..acca2c4
--- /dev/null
+++ b/drv/BigArrays.drv
@@ -0,0 +1,1498 @@
+/*		 
+ * Copyright (C) 2009-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ *
+ *
+ *
+ * Copyright (C) 1999 CERN - European Organization for Nuclear Research.
+ *
+ *   Permission to use, copy, modify, distribute and sell this software and
+ *   its documentation for any purpose is hereby granted without fee,
+ *   provided that the above copyright notice appear in all copies and that
+ *   both that copyright notice and this permission notice appear in
+ *   supporting documentation. CERN makes no representations about the
+ *   suitability of this software for any purpose. It is provided "as is"
+ *   without expressed or implied warranty. 
+ */
+
+package PACKAGE;
+
+import java.util.Arrays;
+import java.util.Random;
+
+import it.unimi.dsi.fastutil.BigArrays;
+import it.unimi.dsi.fastutil.Hash;
+import static it.unimi.dsi.fastutil.BigArrays.start;
+import static it.unimi.dsi.fastutil.BigArrays.segment;
+import static it.unimi.dsi.fastutil.BigArrays.displacement;
+import static it.unimi.dsi.fastutil.BigArrays.SEGMENT_MASK;
+import static it.unimi.dsi.fastutil.BigArrays.SEGMENT_SIZE;
+
+#if #keys(primitive)
+
+#if ! #keyclass(Byte) && ! #keyclass(Boolean)
+import it.unimi.dsi.fastutil.bytes.ByteBigArrays;
+#endif
+
+/** A class providing static methods and objects that do useful things with {@linkplain BigArrays big arrays}.
+ *
+ * <p>In particular, the <code>ensureCapacity()</code>, <code>grow()</code>,
+ * <code>trim()</code> and <code>setLength()</code> methods make it possible to handle
+ * big arrays much like array lists.
+ *
+ * <P>Note that {@link it.unimi.dsi.fastutil.io.BinIO} and {@link it.unimi.dsi.fastutil.io.TextIO}
+ * contain several methods that make it possible to load and save big arrays of primitive types as sequences
+ * of elements in {@link java.io.DataInput} format (i.e., not as objects) or as sequences of lines of text.
+ *
+ * @see BigArrays
+ */
+
+public class BIG_ARRAYS {
+
+#else
+
+import java.util.Comparator;
+
+/** A class providing static methods and objects that do useful things with {@linkplain BigArrays big arrays}.
+ *
+ * <p>In particular, the <code>ensureCapacity()</code>, <code>grow()</code>,
+ * <code>trim()</code> and <code>setLength()</code> methods make it possible to handle
+ * big arrays much like array lists.
+ *
+ * <P>Note that {@link it.unimi.dsi.fastutil.io.BinIO} and {@link it.unimi.dsi.fastutil.io.TextIO}
+ * contain several methods that make it possible to load and save big arrays of primitive types as sequences
+ * of elements in {@link java.io.DataInput} format (i.e., not as objects) or as sequences of lines of text.
+ *
+ * <P><strong>Warning:</strong> creating arrays 
+ * using {@linkplain java.lang.reflect.Array#newInstance(Class,int) reflection}, as it
+ * happens in {@link #ensureCapacity(Object[][],long,long)} and {@link #grow(Object[][],long,long)},
+ * is <em>significantly slower</em> than using <code>new</code>. This phenomenon is particularly
+ * evident in the first growth phases of an array reallocated with doubling (or similar) logic.
+ *
+ * @see BigArrays
+ */
+
+public class BIG_ARRAYS {
+
+#endif
+	private BIG_ARRAYS() {}
+
+	/** A static, final, empty big array. */
+	public final static KEY_TYPE[][] EMPTY_BIG_ARRAY = {};
+
+	/** Returns the element of the given big array at the specified index.
+	 * 
+	 * @param array a big array.
+	 * @param index a position in the big array.
+	 * @return the element of the big array at the specified position.
+	 */
+	public static KEY_GENERIC KEY_GENERIC_TYPE get( final KEY_GENERIC_TYPE[][] array, final long index ) {
+		return array[ segment( index ) ][ displacement( index ) ];
+	}
+	
+	/** Sets the element of the given big array at the specified index.
+	 * 
+	 * @param array a big array.
+	 * @param index a position in the big array.
+	 * @param value the new value for the element at the specified position.
+	 */
+	public static KEY_GENERIC void set( final KEY_GENERIC_TYPE[][] array, final long index, KEY_GENERIC_TYPE value ) {
+		array[ segment( index ) ][ displacement( index ) ] = value;
+	}
+	
+	/** Swaps the elements of the given big array at the specified indices.
+	 * 
+	 * @param array a big array.
+	 * @param first a position in the big array.
+	 * @param second a position in the big array.
+	 */
+	public static KEY_GENERIC void swap( final KEY_GENERIC_TYPE[][] array, final long first, final long second ) {
+		final KEY_GENERIC_TYPE t = array[ segment( first ) ][ displacement( first ) ];
+		array[ segment( first ) ][ displacement( first ) ] = array[ segment( second ) ][ displacement( second ) ];
+		array[ segment( second ) ][ displacement( second ) ] = t;
+	}
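+	/* Illustrative sketch (comment only, not part of the generated API), assuming the int
+	   specialization produced from this template, it.unimi.dsi.fastutil.ints.IntBigArrays.
+	   Positions beyond Integer.MAX_VALUE are addressed through segment/displacement:
+
+	       int[][] big = IntBigArrays.newBigArray( 1L << 32 );   // 2^32 elements (illustrative; ~16GiB)
+	       IntBigArrays.set( big, 3000000000L, 42 );
+	       int x = IntBigArrays.get( big, 3000000000L );         // x == 42
+	       IntBigArrays.swap( big, 0, 3000000000L );
+	       long n = IntBigArrays.length( big );                  // n == 1L << 32
+	*/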
+	
+#if #keys(primitive) && ! #keyclass(Boolean)
+	/** Adds the specified increment to the element of the given big array at the specified index.
+	 * 
+	 * @param array a big array.
+	 * @param index a position in the big array.
+	 * @param incr the increment.
+	 */
+	public static void add( final KEY_GENERIC_TYPE[][] array, final long index, KEY_GENERIC_TYPE incr ) {
+		array[ segment( index ) ][ displacement( index ) ] += incr;
+	}
+
+	/** Multiplies the element of the given big array at the specified index by the specified factor.
+	 * 
+	 * @param array a big array.
+	 * @param index a position in the big array.
+	 * @param factor the factor.
+	 */
+	public static void mul( final KEY_GENERIC_TYPE[][] array, final long index, KEY_GENERIC_TYPE factor ) {
+		array[ segment( index ) ][ displacement( index ) ] *= factor;
+	}
+
+	/** Increments the element of the given big array at the specified index.
+	 * 
+	 * @param array a big array.
+	 * @param index a position in the big array.
+	 */
+	public static void incr( final KEY_GENERIC_TYPE[][] array, final long index ) {
+		array[ segment( index ) ][ displacement( index ) ]++;
+	}
+
+	/** Decrements the element of the given big array at the specified index.
+	 * 
+	 * @param array a big array.
+	 * @param index a position in the big array.
+	 */
+	public static void decr( final KEY_GENERIC_TYPE[][] array, final long index ) {
+		array[ segment( index ) ][ displacement( index ) ]--;
+	}
+
+
+#endif
+
+
+	/** Returns the length of the given big array.
+	 * 
+	 * @param array a big array.
+	 * @return the length of the given big array.
+	 */
+	public static KEY_GENERIC long length( final KEY_GENERIC_TYPE[][] array ) {
+		final int length = array.length;
+		return length == 0 ? 0 : start( length - 1 ) + array[ length - 1 ].length;
+	}
+	
+	/** Copies a big array from the specified source big array, beginning at the specified position, to the specified position of the destination big array.
+	 * Handles correctly overlapping regions of the same big array. 
+	 * 
+	 * @param srcArray the source big array.
+	 * @param srcPos the starting position in the source big array.
+	 * @param destArray the destination big array.
+	 * @param destPos the starting position in the destination data.
+	 * @param length the number of elements to be copied.
+	 */
+	public static KEY_GENERIC void copy( final KEY_GENERIC_TYPE[][] srcArray, final long srcPos, final KEY_GENERIC_TYPE[][] destArray, final long destPos, long length ) {
+		if ( destPos <= srcPos ) {
+			int srcSegment = segment( srcPos );
+			int destSegment = segment( destPos );
+			int srcDispl = displacement( srcPos );
+			int destDispl = displacement( destPos );
+			int l;
+			while( length > 0 ) {
+				l = (int)Math.min( length, Math.min( srcArray[ srcSegment ].length - srcDispl, destArray[ destSegment ].length - destDispl ) );
+				System.arraycopy( srcArray[ srcSegment ], srcDispl, destArray[ destSegment ], destDispl, l );
+				if ( ( srcDispl += l ) == SEGMENT_SIZE ) {
+					srcDispl = 0;
+					srcSegment++;
+				}
+				if ( ( destDispl += l ) == SEGMENT_SIZE ) {
+					destDispl = 0;
+					destSegment++;
+				}
+				length -= l;
+			}
+		}
+		else {
+			int srcSegment = segment( srcPos + length );
+			int destSegment = segment( destPos + length  );
+			int srcDispl = displacement( srcPos + length  );
+			int destDispl = displacement( destPos + length  );
+			int l;
+			while( length > 0 ) {
+				if ( srcDispl == 0 ) {
+					srcDispl = SEGMENT_SIZE;
+					srcSegment--;
+				}
+				if ( destDispl == 0 ) {
+					destDispl = SEGMENT_SIZE;
+					destSegment--;
+				}
+				l = (int)Math.min( length, Math.min( srcDispl, destDispl ) );
+				System.arraycopy( srcArray[ srcSegment ], srcDispl - l, destArray[ destSegment ], destDispl - l, l );
+				srcDispl -= l;
+				destDispl -= l;
+				length -= l;
+			}
+		}
+	}
+
+	/** Copies a big array from the specified source big array, beginning at the specified position, to the specified position of the destination array.
+	 * 
+	 * @param srcArray the source big array.
+	 * @param srcPos the starting position in the source big array.
+	 * @param destArray the destination array.
+	 * @param destPos the starting position in the destination data.
+	 * @param length the number of elements to be copied.
+	 */
+	public static KEY_GENERIC void copyFromBig( final KEY_GENERIC_TYPE[][] srcArray, final long srcPos, final KEY_GENERIC_TYPE[] destArray, int destPos, int length ) {
+		int srcSegment = segment( srcPos );
+		int srcDispl = displacement( srcPos );
+		int l;
+		while( length > 0 ) {
+			l = Math.min( srcArray[ srcSegment ].length - srcDispl, length );
+			System.arraycopy( srcArray[ srcSegment ], srcDispl, destArray, destPos, l );
+			if ( ( srcDispl += l ) == SEGMENT_SIZE ) {
+				srcDispl = 0;
+				srcSegment++;
+			}
+			destPos += l;
+			length -= l;
+		}			
+	}
+	
+	/** Copies an array from the specified source array, beginning at the specified position, to the specified position of the destination big array.
+	 * 
+	 * @param srcArray the source array.
+	 * @param srcPos the starting position in the source array.
+	 * @param destArray the destination big array.
+	 * @param destPos the starting position in the destination data.
+	 * @param length the number of elements to be copied.
+	 */
+	public static KEY_GENERIC void copyToBig( final KEY_GENERIC_TYPE[] srcArray, int srcPos, final KEY_GENERIC_TYPE[][] destArray, final long destPos, long length ) {
+		int destSegment = segment( destPos );
+		int destDispl = displacement( destPos );
+		int l;
+		while( length > 0 ) {
+			l = (int)Math.min( destArray[ destSegment ].length - destDispl, length );
+			System.arraycopy( srcArray, srcPos, destArray[ destSegment ], destDispl, l );
+			if ( ( destDispl += l ) == SEGMENT_SIZE ) {
+				destDispl = 0;
+				destSegment++;
+			}
+			srcPos += l;
+			length -= l;
+		}
+	}
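+	/* Illustrative sketch (comment only), again assuming the int specialization IntBigArrays:
+	   moving data between a big array and a standard array.
+
+	       int[] flat = { 1, 2, 3, 4 };
+	       int[][] big = IntBigArrays.newBigArray( 10 );
+	       IntBigArrays.copyToBig( flat, 0, big, 2, 4 );    // big positions 2..5 now hold 1, 2, 3, 4
+	       int[] back = new int[ 4 ];
+	       IntBigArrays.copyFromBig( big, 2, back, 0, 4 );  // back == { 1, 2, 3, 4 }
+	*/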
+	
+#if #keyclass(Object)	
+	/** Creates a new big array using the given one as prototype. 
+	 *
+	 * <P>This method returns a new big array of the given length whose elements
+	 * are of the same class as those of <code>prototype</code>. In case
+	 * of an empty big array, it tries to return {@link #EMPTY_BIG_ARRAY}, if possible.
+	 *
+	 * @param prototype a big array that will be used to type the new one.
+	 * @param length the length of the new big array.
+	 * @return a new big array of given type and length.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public static <K> K[][] newBigArray( final K[][] prototype, final long length ) {
+		return (K[][])newBigArray( prototype.getClass().getComponentType(), length );
+	}
+
+	/** Creates a new big array using the given class as component type.
+	 *
+	 * <P>This method returns a new big array whose segments 
+	 * are of class <code>componentType</code>. In case
+	 * of an empty big array, it tries to return {@link #EMPTY_BIG_ARRAY}, if possible.
+	 *
+	 * @param componentType a class representing the type of segments of the array to be created.
+	 * @param length the length of the new big array.
+	 * @return a new big array of given type and length.
+	 */
+
+	@SuppressWarnings("unchecked")
+	private static Object[][] newBigArray( Class<?> componentType, final long length ) {
+		if ( length == 0 && componentType == Object[].class ) return EMPTY_BIG_ARRAY;
+		final int baseLength = (int)((length + SEGMENT_MASK) / SEGMENT_SIZE);
+		Object[][] base = (Object[][])java.lang.reflect.Array.newInstance( componentType, baseLength );
+		final int residual = (int)(length & SEGMENT_MASK);
+		if ( residual != 0 ) {
+			for( int i = 0; i < baseLength - 1; i++ ) base[ i ] = (Object[])java.lang.reflect.Array.newInstance( componentType.getComponentType(), SEGMENT_SIZE );
+			base[ baseLength - 1 ] = (Object[])java.lang.reflect.Array.newInstance( componentType.getComponentType(), residual );
+		}
+		else for( int i = 0; i < baseLength; i++ ) base[ i ] = (Object[])java.lang.reflect.Array.newInstance( componentType.getComponentType(), SEGMENT_SIZE );
+
+		return base;
+	}
+#endif
+
+	/** Creates a new big array.
+	 *
+	 * @param length the length of the new big array.
+	 * @return a new big array of given length.
+	 */
+
+	public static KEY_TYPE[][] newBigArray( final long length ) {
+		if ( length == 0 ) return EMPTY_BIG_ARRAY;
+		final int baseLength = (int)((length + SEGMENT_MASK) / SEGMENT_SIZE);
+		KEY_TYPE[][] base = new KEY_TYPE[ baseLength ][];
+		final int residual = (int)(length & SEGMENT_MASK);
+		if ( residual != 0 ) {
+			for( int i = 0; i < baseLength - 1; i++ ) base[ i ] = new KEY_TYPE[ SEGMENT_SIZE ];
+			base[ baseLength - 1 ] = new KEY_TYPE[ residual ];
+		}
+		else for( int i = 0; i < baseLength; i++ ) base[ i ] = new KEY_TYPE[ SEGMENT_SIZE ];
+		
+		return base;
+	}
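+	/* For instance, a request for 3 * SEGMENT_SIZE + 100 elements yields baseLength = 4 and
+	   residual = 100: three full segments of SEGMENT_SIZE elements followed by a final segment
+	   of just 100 elements, so no space is allocated beyond the requested length. */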
+
+#if #keyclass(Object)
+	/** Turns a standard array into a big array.
+	 *
+	 * <P>Note that the returned big array might contain the original array as a segment.
+	 *
+	 * @param array an array.
+	 * @return a new big array with the same length and content as <code>array</code>.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public static <K> K[][] wrap( final K[] array ) {
+		if ( array.length == 0 && array.getClass() == Object[].class ) return KEY_GENERIC_BIG_ARRAY_CAST EMPTY_BIG_ARRAY;
+		if ( array.length <= SEGMENT_SIZE ) {
+			final K[][] bigArray = (K[][])java.lang.reflect.Array.newInstance( array.getClass(), 1 );
+			bigArray[ 0 ] = array;
+			return bigArray;
+		}
+		final K[][] bigArray = (K[][])newBigArray( array.getClass(), array.length );
+		for( int i = 0; i < bigArray.length; i++ ) System.arraycopy( array, (int)start( i ), bigArray[ i ], 0, bigArray[ i ].length );
+		return bigArray;
+	}
+
+#else
+	/** Turns a standard array into a big array.
+	 *
+	 * <P>Note that the returned big array might contain the original array as a segment.
+	 *
+	 * @param array an array.
+	 * @return a new big array with the same length and content as <code>array</code>.
+	 */
+
+	public static KEY_TYPE[][] wrap( final KEY_TYPE[] array ) {
+		if ( array.length == 0 ) return EMPTY_BIG_ARRAY;
+		if ( array.length <= SEGMENT_SIZE ) return new KEY_TYPE[][] { array };
+		final KEY_TYPE[][] bigArray = newBigArray( array.length );
+		for( int i = 0; i < bigArray.length; i++ ) System.arraycopy( array, (int)start( i ), bigArray[ i ], 0, bigArray[ i ].length );
+		return bigArray;
+	}
+#endif
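+	/* Illustrative sketch (comment only), assuming the int specialization IntBigArrays: for an
+	   array that fits in a single segment, wrap() reuses it as segment 0 instead of copying it.
+
+	       int[] a = { 10, 20, 30 };
+	       int[][] big = IntBigArrays.wrap( a );    // big.length == 1 && big[ 0 ] == a
+	       IntBigArrays.set( big, 1, 99 );          // also visible as a[ 1 ] == 99
+	*/
+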
+	/** Ensures that a big array can contain the given number of entries.
+	 *
+	 * <P>If you cannot foresee whether this big array will need to be
+	 * enlarged again, you should probably use <code>grow()</code> instead.
+	 *
+	 * <p><strong>Warning:</strong> the returned array might use part of the segments of the original
+	 * array, which must be considered read-only after calling this method.
+	 *
+	 * @param array a big array.
+	 * @param length the new minimum length for this big array.
+	 * @return <code>array</code>, if it contains <code>length</code> entries or more; otherwise,
+	 * a big array with <code>length</code> entries whose first <code>length(array)</code>
+	 * entries are the same as those of <code>array</code>.
+	 */
+	public static KEY_GENERIC KEY_GENERIC_TYPE[][] ensureCapacity( final KEY_GENERIC_TYPE[][] array, final long length ) {
+		return ensureCapacity( array, length, length( array ) );
+	}
+
+#if #keyclass(Object)
+
+	/** Ensures that a big array can contain the given number of entries, preserving just a part of the big array.
+	 *
+	 * <P>This method returns a new big array of the given length whose elements
+	 * are of the same class as those of <code>array</code>.
+	 *
+	 * <p><strong>Warning:</strong> the returned array might use part of the segments of the original
+	 * array, which must be considered read-only after calling this method.
+	 *
+	 * @param array a big array.
+	 * @param length the new minimum length for this big array.
+	 * @param preserve the number of elements of the big array that must be preserved in case a new allocation is necessary.
+	 * @return <code>array</code>, if it can contain <code>length</code> entries or more; otherwise,
+	 * a big array with <code>length</code> entries whose first <code>preserve</code>
+	 * entries are the same as those of <code>array</code>.
+	 */
+	@SuppressWarnings("unchecked")
+	public static KEY_GENERIC KEY_GENERIC_TYPE[][] ensureCapacity( final KEY_GENERIC_TYPE[][] array, final long length, final long preserve ) {
+		final long oldLength = length( array );
+		if ( length > oldLength ) {
+			final int valid = array.length - ( array.length == 0 || array.length > 0 && array[ array.length - 1 ].length == SEGMENT_SIZE ? 0 : 1 );
+			final int baseLength = (int)((length + SEGMENT_MASK) / SEGMENT_SIZE);
+			final KEY_GENERIC_TYPE[][] base = Arrays.copyOf( array, baseLength );
+			final Class<?> componentType = array.getClass().getComponentType();
+			final int residual = (int)(length & SEGMENT_MASK);
+			if ( residual != 0 ) {
+				for( int i = valid; i < baseLength - 1; i++ ) base[ i ] = (KEY_GENERIC_TYPE[])java.lang.reflect.Array.newInstance( componentType.getComponentType(), SEGMENT_SIZE );
+				base[ baseLength - 1 ] = (KEY_GENERIC_TYPE[])java.lang.reflect.Array.newInstance( componentType.getComponentType(), residual );
+			}
+			else for( int i = valid; i < baseLength; i++ ) base[ i ] = (KEY_GENERIC_TYPE[])java.lang.reflect.Array.newInstance( componentType.getComponentType(), SEGMENT_SIZE );
+
+			if ( preserve - ( valid * (long)SEGMENT_SIZE ) > 0 ) copy( array, valid * (long)SEGMENT_SIZE, base, valid * (long)SEGMENT_SIZE, preserve - ( valid * (long)SEGMENT_SIZE ) );
+			return base;
+		}
+		return array;
+	}
+	
+#else
+
+	/** Ensures that a big array can contain the given number of entries, preserving just a part of the big array.
+	 *
+	 * <p><strong>Warning:</strong> the returned array might use part of the segments of the original
+	 * array, which must be considered read-only after calling this method.
+	 *
+	 * @param array a big array.
+	 * @param length the new minimum length for this big array.
+	 * @param preserve the number of elements of the big array that must be preserved in case a new allocation is necessary.
+	 * @return <code>array</code>, if it can contain <code>length</code> entries or more; otherwise,
+	 * a big array with <code>length</code> entries whose first <code>preserve</code>
+	 * entries are the same as those of <code>array</code>.
+	 */
+	public static KEY_TYPE[][] ensureCapacity( final KEY_TYPE[][] array, final long length, final long preserve ) {
+		final long oldLength = length( array );
+		if ( length > oldLength ) {
+			final int valid = array.length - ( array.length == 0 || array.length > 0 && array[ array.length - 1 ].length == SEGMENT_SIZE ? 0 : 1 );
+			final int baseLength = (int)((length + SEGMENT_MASK) / SEGMENT_SIZE);
+			final KEY_TYPE[][] base = Arrays.copyOf( array, baseLength );
+			final int residual = (int)(length & SEGMENT_MASK);
+			if ( residual != 0 ) {
+				for( int i = valid; i < baseLength - 1; i++ ) base[ i ] = new KEY_TYPE[ SEGMENT_SIZE ];
+				base[ baseLength - 1 ] = new KEY_TYPE[ residual ];
+			}
+			else for( int i = valid; i < baseLength; i++ ) base[ i ] = new KEY_TYPE[ SEGMENT_SIZE ];
+
+			if ( preserve - ( valid * (long)SEGMENT_SIZE ) > 0 ) copy( array, valid * (long)SEGMENT_SIZE, base, valid * (long)SEGMENT_SIZE, preserve - ( valid * (long)SEGMENT_SIZE ) );
+			return base;
+		}
+		return array;
+	}
+
+#endif
+
+	/** Grows the given big array to the maximum between the given length and
+	 * the current length multiplied by two, provided that the given
+	 * length is larger than the current length.
+	 *
+	 * <P>If you want complete control on the big array growth, you
+	 * should probably use <code>ensureCapacity()</code> instead.
+	 *
+	 * <p><strong>Warning:</strong> the returned array might use part of the segments of the original
+	 * array, which must be considered read-only after calling this method.
+	 *
+	 * @param array a big array.
+	 * @param length the new minimum length for this big array.
+	 * @return <code>array</code>, if it can contain <code>length</code>
+	 * entries; otherwise, a big array with
+	 * max(<code>length</code>, <code>2 * length(array)</code>) entries whose first
+	 * <code>length(array)</code> entries are the same as those of <code>array</code>.
+	 * */
+
+	public static KEY_GENERIC KEY_GENERIC_TYPE[][] grow( final KEY_GENERIC_TYPE[][] array, final long length ) {
+		final long oldLength = length( array );
+		return length > oldLength ? grow( array, length, oldLength ) : array;
+	}
+
+	/** Grows the given big array to the maximum between the given length and
+	 * the current length multiplied by two, provided that the given
+	 * length is larger than the current length, preserving just a part of the big array.
+	 *
+	 * <P>If you want complete control on the big array growth, you
+	 * should probably use <code>ensureCapacity()</code> instead.
+	 *
+	 * <p><strong>Warning:</strong> the returned array might use part of the segments of the original
+	 * array, which must be considered read-only after calling this method.
+	 *
+	 * @param array a big array.
+	 * @param length the new minimum length for this big array.
+	 * @param preserve the number of elements of the big array that must be preserved in case a new allocation is necessary.
+	 * @return <code>array</code>, if it can contain <code>length</code>
+	 * entries; otherwise, a big array with
+	 * max(<code>length</code>, <code>2 * length(array)</code>) entries whose first
+	 * <code>preserve</code> entries are the same as those of <code>array</code>.
+	 * */
+
+	public static KEY_GENERIC KEY_GENERIC_TYPE[][] grow( final KEY_GENERIC_TYPE[][] array, final long length, final long preserve ) {
+		final long oldLength = length( array );
+		return length > oldLength ? ensureCapacity( array, Math.max( 2 * oldLength, length ), preserve ) : array;
+	}
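+	/* Illustrative growth pattern (comment only), assuming the int specialization IntBigArrays.
+	   This is the array-list-like idiom mentioned in the class comment: keep the number of used
+	   entries separately and let grow() double the backing big array on demand.
+
+	       int[][] a = IntBigArrays.EMPTY_BIG_ARRAY;
+	       long size = 0;
+	       for( int i = 0; i < 1000; i++ ) {
+	           a = IntBigArrays.grow( a, size + 1, size );  // preserve only the used prefix
+	           IntBigArrays.set( a, size++, i );
+	       }
+	       a = IntBigArrays.trim( a, size );                // drop the unused capacity
+	*/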
+
+#if #keyclass(Object)
+
+	/** Trims the given big array to the given length.
+	 *
+	 * <p><strong>Warning:</strong> the returned array might use part of the segments of the original
+	 * array, which must be considered read-only after calling this method.
+	 *
+	 * @param array a big array.
+	 * @param length the new maximum length for the big array.
+	 * @return <code>array</code>, if it contains <code>length</code>
+	 * entries or less; otherwise, a big array with
+	 * <code>length</code> entries whose entries are the same as
+	 * the first <code>length</code> entries of <code>array</code>.
+	 * 
+	 */
+
+	public static KEY_GENERIC KEY_GENERIC_TYPE[][] trim( final KEY_GENERIC_TYPE[][] array, final long length ) {
+		final long oldLength = length( array );
+		if ( length >= oldLength ) return array;
+		final int baseLength = (int)((length + SEGMENT_MASK) / SEGMENT_SIZE);
+		final KEY_GENERIC_TYPE[][] base = Arrays.copyOf( array, baseLength );
+		final int residual = (int)(length & SEGMENT_MASK);
+		if ( residual != 0 ) base[ baseLength - 1 ] = ARRAYS.trim( base[ baseLength - 1 ], residual );
+		return base;
+	}
+
+#else
+
+	/** Trims the given big array to the given length.
+	 *
+	 * <p><strong>Warning:</strong> the returned array might use part of the segments of the original
+	 * array, which must be considered read-only after calling this method.
+	 *
+	 * @param array a big array.
+	 * @param length the new maximum length for the big array.
+	 * @return <code>array</code>, if it contains <code>length</code>
+	 * entries or less; otherwise, a big array with
+	 * <code>length</code> entries whose entries are the same as
+	 * the first <code>length</code> entries of <code>array</code>.
+	 * 
+	 */
+
+	public static KEY_GENERIC KEY_GENERIC_TYPE[][] trim( final KEY_GENERIC_TYPE[][] array, final long length ) {
+		final long oldLength = length( array );
+		if ( length >= oldLength ) return array;
+		final int baseLength = (int)((length + SEGMENT_MASK) / SEGMENT_SIZE);
+		final KEY_TYPE[][] base = Arrays.copyOf( array, baseLength );
+		final int residual = (int)(length & SEGMENT_MASK);
+		if ( residual != 0 ) base[ baseLength - 1 ] = ARRAYS.trim( base[ baseLength - 1 ], residual );
+		return base;
+	}
+
+#endif
+
+	/** Sets the length of the given big array.
+	 *
+	 * <p><strong>Warning:</strong> the returned array might use part of the segments of the original
+	 * array, which must be considered read-only after calling this method.
+	 *
+	 * @param array a big array.
+	 * @param length the new length for the big array.
+	 * @return <code>array</code>, if it contains exactly <code>length</code>
+	 * entries; otherwise, if it contains <em>more</em> than
+	 * <code>length</code> entries, a big array with <code>length</code> entries
+	 * whose entries are the same as the first <code>length</code> entries of
+	 * <code>array</code>; otherwise, a big array with <code>length</code> entries
+	 * whose first <code>length(array)</code> entries are the same as those of
+	 * <code>array</code>.
+	 * 
+	 */
+
+	public static KEY_GENERIC KEY_GENERIC_TYPE[][] setLength( final KEY_GENERIC_TYPE[][] array, final long length ) {
+		final long oldLength = length( array );
+		if ( length == oldLength ) return array;
+		if ( length < oldLength ) return trim( array, length );
+		return ensureCapacity( array, length );
+	}
+
+	/** Returns a copy of a portion of a big array.
+	 *
+	 * @param array a big array.
+	 * @param offset the first element to copy.
+	 * @param length the number of elements to copy.
+	 * @return a new big array containing <code>length</code> elements of <code>array</code> starting at <code>offset</code>.
+	 */
+
+	public static KEY_GENERIC KEY_GENERIC_TYPE[][] copy( final KEY_GENERIC_TYPE[][] array, final long offset, final long length ) {
+		ensureOffsetLength( array, offset, length );
+		final KEY_GENERIC_TYPE[][] a = 
+#if #keyclass(Object)
+			newBigArray( array, length );
+#else
+			newBigArray( length );
+#endif
+		copy( array, offset, a, 0, length );
+		return a;
+	}
+
+	/** Returns a copy of a big array.
+	 *
+	 * @param array a big array.
+	 * @return a copy of <code>array</code>.
+	 */
+
+	public static KEY_GENERIC KEY_GENERIC_TYPE[][] copy( final KEY_GENERIC_TYPE[][] array ) {
+		final KEY_GENERIC_TYPE[][] base = array.clone();
+		for( int i = base.length; i-- != 0; ) base[ i ] = array[ i ].clone();
+		return base;
+	}
+
+	/** Fills the given big array with the given value.
+	 *
+	 * <P>This method uses a backward loop. It is significantly faster than the corresponding
+	 * method in {@link java.util.Arrays}.
+	 *
+	 * @param array a big array.
+	 * @param value the new value for all elements of the big array.
+	 */
+
+	public static KEY_GENERIC void fill( final KEY_GENERIC_TYPE[][] array, final KEY_GENERIC_TYPE value ) {
+		for( int i = array.length; i-- != 0; ) ARRAYS.fill( array[ i ], value );
+	}
+
+	/** Fills a portion of the given big array with the given value.
+	 *
+	 * <P>If possible (i.e., <code>from</code> is 0) this method uses a
+	 * backward loop. In this case, it is significantly faster than the
+	 * corresponding method in {@link java.util.Arrays}.
+	 *
+	 * @param array a big array.
+	 * @param from the starting index of the portion to fill.
+	 * @param to the end index of the portion to fill.
+	 * @param value the new value for all elements of the specified portion of the big array.
+	 */
+
+	public static KEY_GENERIC void fill( final KEY_GENERIC_TYPE[][] array, final long from, long to, final KEY_GENERIC_TYPE value ) {
+		final long length = length( array );
+		BigArrays.ensureFromTo( length, from, to );
+		int fromSegment = segment( from );
+		int toSegment = segment( to );
+		int fromDispl = displacement( from );
+		int toDispl = displacement( to );
+		if ( fromSegment == toSegment ) {
+			ARRAYS.fill( array[ fromSegment ], fromDispl, toDispl, value );
+			return;
+		}
+
+		if ( toDispl != 0 ) ARRAYS.fill( array[ toSegment ], 0, toDispl, value );
+		while( --toSegment > fromSegment ) ARRAYS.fill( array[ toSegment ], value );
+		ARRAYS.fill( array[ fromSegment ], fromDispl, SEGMENT_SIZE, value );
+	}
+
+
+	/** Returns true if the two big arrays are elementwise equal.
+	 *
+	 * <P>This method uses a backward loop. It is significantly faster than the corresponding
+	 * method in {@link java.util.Arrays}.
+	 *
+	 * @param a1 a big array.
+	 * @param a2 another big array.
+	 * @return true if the two big arrays are of the same length, and their elements are equal.
+	 */
+
+	public static KEY_GENERIC boolean equals( final KEY_GENERIC_TYPE[][] a1, final KEY_GENERIC_TYPE a2[][] ) {
+		if ( length( a1 ) != length( a2 ) ) return false;
+		int i = a1.length, j;
+		KEY_GENERIC_TYPE[] t, u;
+		while( i-- != 0 ) {
+			t = a1[ i ];
+			u = a2[ i ];
+			j = t.length;
+			while( j-- != 0 ) if (! KEY_EQUALS( t[ j ], u[ j ] ) ) return false;
+		}
+		return true;
+	}
+
+	/** Returns a string representation of the contents of the specified big array.
+	 *
+	 * <P>The string representation consists of a list of the big array's elements, enclosed in square brackets ("[]").
+	 * Adjacent elements are separated by the characters ", " (a comma followed by a space).
+	 * Returns "null" if <code>a</code> is null.
+	 * @param a the big array whose string representation to return.
+	 * @return the string representation of <code>a</code>.
+	 */
+
+	public static KEY_GENERIC String toString( final KEY_GENERIC_TYPE[][] a ) {
+		if ( a == null ) return "null";
+		final long last = length( a ) - 1;
+		if ( last == - 1 ) return "[]";
+		final StringBuilder b = new StringBuilder();
+		b.append('[');
+		for ( long i = 0; ; i++ ) {
+			b.append( String.valueOf( get( a, i ) ) );
+			if ( i == last ) return b.append(']').toString();
+			b.append(", ");
+        }
+	}
+
+
+	/** Ensures that a range given by its first (inclusive) and last (exclusive) elements fits a big array.
+	 *
+	 * <P>This method may be used whenever a big array range check is needed.
+	 *
+	 * @param a a big array.
+	 * @param from a start index (inclusive).
+	 * @param to an end index (exclusive).
+	 * @throws IllegalArgumentException if <code>from</code> is greater than <code>to</code>.
+	 * @throws ArrayIndexOutOfBoundsException if <code>from</code> or <code>to</code> are greater than the big array length or negative.
+	 */
+	public static KEY_GENERIC void ensureFromTo( final KEY_GENERIC_TYPE[][] a, final long from, final long to ) {
+		BigArrays.ensureFromTo( length( a ), from, to );
+	}
+
+	/** Ensures that a range given by an offset and a length fits a big array.
+	 *
+	 * <P>This method may be used whenever a big array range check is needed.
+	 *
+	 * @param a a big array.
+	 * @param offset a start index.
+	 * @param length a length (the number of elements in the range).
+	 * @throws IllegalArgumentException if <code>length</code> is negative.
+	 * @throws ArrayIndexOutOfBoundsException if <code>offset</code> is negative or <code>offset</code>+<code>length</code> is greater than the big array length.
+	 */
+	public static KEY_GENERIC void ensureOffsetLength( final KEY_GENERIC_TYPE[][] a, final long offset, final long length ) {
+		BigArrays.ensureOffsetLength( length( a ), offset, length );
+	}
+
+
+	/** A type-specific content-based hash strategy for big arrays. */
+
+	private static final class BigArrayHashStrategy KEY_GENERIC implements Hash.Strategy<KEY_GENERIC_TYPE[][]>, java.io.Serializable {
+    	private static final long serialVersionUID = -7046029254386353129L;
+    
+		public int hashCode( final KEY_GENERIC_TYPE[][] o ) {
+			return java.util.Arrays.deepHashCode( o );
+		}
+		
+		public boolean equals( final KEY_GENERIC_TYPE[][] a, final KEY_GENERIC_TYPE[][] b ) {
+			return BIG_ARRAYS.equals( a, b );
+		}
+	}
+
+	/** A type-specific content-based hash strategy for big arrays.
+	 *
+	 * <P>This hash strategy may be used in custom hash collections whenever keys are
+	 * big arrays that must be considered equal by content. This strategy
+	 * handles <code>null</code> correctly and is serializable.
+	 */
+
+	@SuppressWarnings({"unchecked", "rawtypes"})
+	public final static Hash.Strategy HASH_STRATEGY = new BigArrayHashStrategy();
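+	/* Illustrative sketch (comment only): using the strategy so that big arrays are compared by
+	   content in a hash collection. The set class below (it.unimi.dsi.fastutil.objects.ObjectOpenCustomHashSet)
+	   and the int specialization IntBigArrays are assumptions; any fastutil custom hash collection
+	   accepting a Hash.Strategy should work the same way. The assignment is unchecked, as the strategy is raw.
+
+	       ObjectOpenCustomHashSet<int[][]> seen =
+	           new ObjectOpenCustomHashSet<int[][]>( IntBigArrays.HASH_STRATEGY );
+	       seen.add( IntBigArrays.wrap( new int[] { 1, 2, 3 } ) );
+	       seen.contains( IntBigArrays.wrap( new int[] { 1, 2, 3 } ) );  // true: equal by content
+	*/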
+
+	private static final int SMALL = 7;
+	private static final int MEDIUM = 40;
+
+	private static KEY_GENERIC void vecSwap( final KEY_GENERIC_TYPE[][] x, long a, long b, final long n ) {
+		for( int i = 0; i < n; i++, a++, b++ ) swap( x, a, b );
+	}
+ 
+	private static KEY_GENERIC long med3( final KEY_GENERIC_TYPE x[][], final long a, final long b, final long c, KEY_COMPARATOR KEY_GENERIC comp ) {
+		int ab = comp.compare( get( x, a ), get( x, b ) );
+		int ac = comp.compare( get( x, a ), get( x, c ) );
+		int bc = comp.compare( get( x, b ), get( x, c ) );
+		return ( ab < 0 ?
+			( bc < 0 ? b : ac < 0 ? c : a ) :
+			( bc > 0 ? b : ac > 0 ? c : a ) );
+	}
+
+	private static KEY_GENERIC void selectionSort( final KEY_GENERIC_TYPE[][] a, final long from, final long to, final KEY_COMPARATOR KEY_GENERIC comp ) {
+		for( long i = from; i < to - 1; i++ ) {
+			long m = i;
+			for( long j = i + 1; j < to; j++ ) if ( comp.compare( BIG_ARRAYS.get( a, j ), BIG_ARRAYS.get( a, m ) ) < 0 ) m = j;
+			if ( m != i ) swap( a, i, m );
+		}
+	}
+
+	/** Sorts the specified range of elements according to the order induced by the specified
+	 * comparator using quicksort. 
+	 * 
+	 * <p>The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas
+	 * McIlroy, “Engineering a Sort Function”, <i>Software: Practice and Experience</i>, 23(11), pages
+	 * 1249−1265, 1993.
+	 * 
+	 * @param x the big array to be sorted.
+	 * @param from the index of the first element (inclusive) to be sorted.
+	 * @param to the index of the last element (exclusive) to be sorted.
+	 * @param comp the comparator to determine the sorting order.
+	 */
+	public static KEY_GENERIC void quickSort( final KEY_GENERIC_TYPE[][] x, final long from, final long to, final KEY_COMPARATOR KEY_GENERIC comp ) {
+		final long len = to - from;
+
+		// Selection sort on smallest arrays
+		if ( len < SMALL ) {
+			selectionSort( x, from, to, comp );
+			return;
+		}
+
+		// Choose a partition element, v
+		long m = from + len / 2;	 // Small arrays, middle element
+		if ( len > SMALL ) {
+			long l = from;
+			long n = to - 1;
+			if ( len > MEDIUM ) {		// Big arrays, pseudomedian of 9
+				long s = len / 8;
+				l = med3( x, l, l + s, l + 2 * s, comp );
+				m = med3( x, m - s, m, m + s, comp );
+				n = med3( x, n - 2 * s, n - s, n, comp );
+			}
+			m = med3( x, l, m, n, comp ); // Mid-size, med of 3
+		}
+		
+		final KEY_GENERIC_TYPE v = get( x, m );
+
+		// Establish Invariant: v* (<v)* (>v)* v*
+		long a = from, b = a, c = to - 1, d = c;
+		while(true) {
+			int comparison;
+			while ( b <= c && ( comparison = comp.compare( get( x, b ), v ) ) <= 0 ) {
+				if ( comparison == 0 ) swap( x, a++, b );
+				b++;
+			}
+			while (c >= b && ( comparison = comp.compare( get( x, c ), v ) ) >=0 ) {
+				if ( comparison == 0 ) swap( x, c, d-- );
+				c--;
+			}
+			if ( b > c ) break;
+			swap( x, b++, c-- );
+		}
+
+		// Swap partition elements back to middle
+		long s, n = to;
+		s = Math.min( a - from, b - a );
+		vecSwap( x, from, b - s, s );
+		s = Math.min( d - c, n - d- 1 );
+		vecSwap( x, b, n - s, s );
+
+		// Recursively sort non-partition-elements
+		if ( ( s = b - a ) > 1 ) quickSort( x, from, from + s, comp );
+		if ( ( s = d - c ) > 1 ) quickSort( x, n - s, n, comp );
+
+	}
+
+	@SuppressWarnings("unchecked")
+	private static KEY_GENERIC long med3( final KEY_GENERIC_TYPE x[][], final long a, final long b, final long c ) {
+		int ab = KEY_CMP( get( x, a ), get( x, b ) );
+		int ac = KEY_CMP( get( x, a ), get( x, c ) );
+		int bc = KEY_CMP( get( x, b ), get( x, c ) );
+		return ( ab < 0 ?
+			( bc < 0 ? b : ac < 0 ? c : a ) :
+			( bc > 0 ? b : ac > 0 ? c : a ) );
+	}
+
+
+	@SuppressWarnings("unchecked")
+	private static KEY_GENERIC void selectionSort( final KEY_GENERIC_TYPE[][] a, final long from, final long to ) {
+		for( long i = from; i < to - 1; i++ ) {
+			long m = i;
+			for( long j = i + 1; j < to; j++ ) if ( KEY_LESS( BIG_ARRAYS.get( a, j ), BIG_ARRAYS.get( a, m ) ) ) m = j;
+			if ( m != i ) swap( a, i, m );
+		}
+	}
+
+	/** Sorts the specified big array according to the order induced by the specified
+	 * comparator using quicksort. 
+	 * 
+	 * <p>The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas
+	 * McIlroy, “Engineering a Sort Function”, <i>Software: Practice and Experience</i>, 23(11), pages
+	 * 1249−1265, 1993.
+	 * 
+	 * @param x the big array to be sorted.
+	 * @param comp the comparator to determine the sorting order.
+	 * 
+	 */
+	public static KEY_GENERIC void quickSort( final KEY_GENERIC_TYPE[][] x, final KEY_COMPARATOR KEY_GENERIC comp ) {
+		quickSort( x, 0, BIG_ARRAYS.length( x ), comp );
+	}
+	
+	/** Sorts the specified range of elements according to the natural ascending order using quicksort.
+	 * 
+	 * <p>The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas
+	 * McIlroy, “Engineering a Sort Function”, <i>Software: Practice and Experience</i>, 23(11), pages
+	 * 1249−1265, 1993.
+	 * 
+	 * @param x the big array to be sorted.
+	 * @param from the index of the first element (inclusive) to be sorted.
+	 * @param to the index of the last element (exclusive) to be sorted.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public static KEY_GENERIC void quickSort( final KEY_GENERIC_TYPE[][] x, final long from, final long to ) {
+		final long len = to - from;
+
+		// Selection sort on smallest arrays
+		if ( len < SMALL ) {
+			selectionSort( x, from, to );
+			return;
+		}
+
+		// Choose a partition element, v
+		long m = from + len / 2;	 // Small arrays, middle element
+		if ( len > SMALL ) {
+			long l = from;
+			long n = to - 1;
+			if ( len > MEDIUM ) {		// Big arrays, pseudomedian of 9
+				long s = len / 8;
+				l = med3( x, l, l + s, l + 2 * s );
+				m = med3( x, m - s, m, m + s );
+				n = med3( x, n - 2 * s, n - s, n );
+			}
+			m = med3( x, l, m, n ); // Mid-size, med of 3
+		}
+		
+		final KEY_GENERIC_TYPE v = get( x, m );
+
+		// Establish Invariant: v* (<v)* (>v)* v*
+		long a = from, b = a, c = to - 1, d = c;
+		while(true) {
+			int comparison;
+			while ( b <= c && ( comparison = KEY_CMP( get( x, b ), v ) ) <= 0 ) {
+				if ( comparison == 0 ) swap( x, a++, b );
+				b++;
+			}
+			while (c >= b && ( comparison = KEY_CMP( get( x, c ), v ) ) >=0 ) {
+				if ( comparison == 0 ) swap( x, c, d-- );
+				c--;
+			}
+			if ( b > c ) break;
+			swap( x, b++, c-- );
+		}
+
+		// Swap partition elements back to middle
+		long s, n = to;
+		s = Math.min( a - from, b - a );
+		vecSwap( x, from, b - s, s );
+		s = Math.min( d - c, n - d- 1 );
+		vecSwap( x, b, n - s, s );
+
+		// Recursively sort non-partition-elements
+		if ( ( s = b - a ) > 1 ) quickSort( x, from, from + s );
+		if ( ( s = d - c ) > 1 ) quickSort( x, n - s, n );
+
+	}
+
+
+	/** Sorts the specified big array according to the natural ascending order using quicksort.
+	 * 
+	 * <p>The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas
+	 * McIlroy, “Engineering a Sort Function”, <i>Software: Practice and Experience</i>, 23(11), pages
+	 * 1249−1265, 1993.
+	 * 
+	 * @param x the big array to be sorted.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public static KEY_GENERIC void quickSort( final KEY_GENERIC_TYPE[][] x ) {
+		quickSort( x, 0, BIG_ARRAYS.length( x ) );
+	}
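+	/* A minimal usage sketch, not a definitive recipe: assuming the int specialization
+	 * IntBigArrays generated from this driver (and the IntComparators.OPPOSITE_COMPARATOR
+	 * constant), a big array can be sorted in natural order or through an explicit comparator.
+	 *
+	 *   import it.unimi.dsi.fastutil.ints.IntBigArrays;
+	 *   import it.unimi.dsi.fastutil.ints.IntComparators;
+	 *
+	 *   int[][] a = IntBigArrays.newBigArray( 1000000 );
+	 *   for( long i = IntBigArrays.length( a ); i-- != 0; ) IntBigArrays.set( a, i, (int)( i * 31 % 1000003 ) );
+	 *   IntBigArrays.quickSort( a );                                     // natural ascending order
+	 *   IntBigArrays.quickSort( a, IntComparators.OPPOSITE_COMPARATOR ); // descending, via a comparator
+	 */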
+
+
+
+
+#if ! #keyclass(Boolean)
+
+	/**
+	 * Searches a range of the specified big array for the specified value using 
+	 * the binary search algorithm. The range must be sorted prior to making this call. 
+	 * If it is not sorted, the results are undefined. If the range contains multiple elements with 
+	 * the specified value, there is no guarantee which one will be found.
+	 *
+	 * @param a the big array to be searched.
+	 * @param from  the index of the first element (inclusive) to be searched.
+	 * @param to  the index of the last element (exclusive) to be searched.
+	 * @param key the value to be searched for.
+	 * @return index of the search key, if it is contained in the big array;
+	 *             otherwise, <tt>(-(<i>insertion point</i>) - 1)</tt>.  The <i>insertion
+	 *             point</i> is defined as the point at which the value would
+	 *             be inserted into the big array: the index of the first
+	 *             element greater than the key, or the length of the big array, if all
+	 *             elements in the big array are less than the specified key.  Note
+	 *             that this guarantees that the return value will be >= 0 if
+	 *             and only if the key is found.
+	 * @see java.util.Arrays
+	 */
+	@SuppressWarnings({"unchecked","rawtypes"})
+	public static KEY_GENERIC long binarySearch( final KEY_GENERIC_TYPE[][] a, long from, long to, final KEY_GENERIC_TYPE key ) {
+		KEY_GENERIC_TYPE midVal;
+		to--;
+		while (from <= to) {
+			final long mid = (from + to) >>> 1;
+			midVal = get( a, mid );
+#if #keys(primitive)
+			if (midVal < key) from = mid + 1;
+			else if (midVal > key) to = mid - 1;
+			else return mid;
+#else
+			final int cmp = ((Comparable)midVal).compareTo( key );
+			if ( cmp < 0 ) from = mid + 1;
+			else if (cmp > 0) to = mid - 1;
+			else return mid;
+#endif
+        }
+		return -( from + 1 );
+	}
+
+	/**
+	 * Searches a big array for the specified value using 
+	 * the binary search algorithm. The big array must be sorted prior to making this call.
+	 * If it is not sorted, the results are undefined. If the big array contains multiple elements with
+	 * the specified value, there is no guarantee which one will be found.
+	 *
+	 * @param a the big array to be searched.
+	 * @param key the value to be searched for.
+	 * @return index of the search key, if it is contained in the big array;
+	 *             otherwise, <tt>(-(<i>insertion point</i>) - 1)</tt>.  The <i>insertion
+	 *             point</i> is defined as the point at which the value would
+	 *             be inserted into the big array: the index of the first
+	 *             element greater than the key, or the length of the big array, if all
+	 *             elements in the big array are less than the specified key.  Note
+	 *             that this guarantees that the return value will be >= 0 if
+	 *             and only if the key is found.
+	 * @see java.util.Arrays
+	 */
+	public static KEY_GENERIC long binarySearch( final KEY_GENERIC_TYPE[][] a, final KEY_TYPE key ) {
+		return binarySearch( a, 0, BIG_ARRAYS.length( a ), key );
+	}
+
+	/**
+	 * Searches a range of the specified big array for the specified value using 
+	 * the binary search algorithm and a specified comparator. The range must be sorted following the comparator prior to making this call. 
+	 * If it is not sorted, the results are undefined. If the range contains multiple elements with 
+	 * the specified value, there is no guarantee which one will be found.
+	 *
+	 * @param a the big array to be searched.
+	 * @param from  the index of the first element (inclusive) to be searched.
+	 * @param to  the index of the last element (exclusive) to be searched.
+	 * @param key the value to be searched for.
+	 * @param c a comparator.
+	 * @return index of the search key, if it is contained in the big array;
+	 *             otherwise, <tt>(-(<i>insertion point</i>) - 1)</tt>.  The <i>insertion
+	 *             point</i> is defined as the point at which the value would
+	 *             be inserted into the big array: the index of the first
+	 *             element greater than the key, or the length of the big array, if all
+	 *             elements in the big array are less than the specified key.  Note
+	 *             that this guarantees that the return value will be >= 0 if
+	 *             and only if the key is found.
+	 * @see java.util.Arrays
+	 */
+	public static KEY_GENERIC long binarySearch( final KEY_GENERIC_TYPE[][] a, long from, long to, final KEY_GENERIC_TYPE key, final KEY_COMPARATOR KEY_GENERIC c ) {
+		KEY_GENERIC_TYPE midVal;
+		to--;
+		while (from <= to) {
+			final long mid = (from + to) >>> 1;
+			midVal = get( a, mid );
+			final int cmp = c.compare( midVal, key );
+			if ( cmp < 0 ) from = mid + 1;
+			else if (cmp > 0) to = mid - 1;
+			else return mid; // key found
+		}
+		return -( from + 1 );
+	}
+
+	/**
+	 * Searches a big array for the specified value using 
+	 * the binary search algorithm and a specified comparator. The big array must be sorted following the comparator prior to making this call.
+	 * If it is not sorted, the results are undefined. If the big array contains multiple elements with
+	 * the specified value, there is no guarantee which one will be found.
+	 *
+	 * @param a the big array to be searched.
+	 * @param key the value to be searched for.
+	 * @param c a comparator.
+	 * @return index of the search key, if it is contained in the big array;
+	 *             otherwise, <tt>(-(<i>insertion point</i>) - 1)</tt>.  The <i>insertion
+	 *             point</i> is defined as the point at which the value would
+	 *             be inserted into the big array: the index of the first
+	 *             element greater than the key, or the length of the big array, if all
+	 *             elements in the big array are less than the specified key.  Note
+	 *             that this guarantees that the return value will be >= 0 if
+	 *             and only if the key is found.
+	 * @see java.util.Arrays
+	 */
+	public static KEY_GENERIC long binarySearch( final KEY_GENERIC_TYPE[][] a, final KEY_GENERIC_TYPE key, final KEY_COMPARATOR KEY_GENERIC c ) {
+		return binarySearch( a, 0, BIG_ARRAYS.length( a ), key, c );
+	}
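+	/* A hedged usage sketch, assuming the generated IntBigArrays specialization: the negative
+	 * return value documented above encodes the insertion point as -(insertion point) - 1,
+	 * so callers typically decode it as follows.
+	 *
+	 *   import it.unimi.dsi.fastutil.ints.IntBigArrays;
+	 *
+	 *   int[][] sorted = IntBigArrays.wrap( new int[] { 1, 3, 5, 7 } );
+	 *   long pos = IntBigArrays.binarySearch( sorted, 4 );
+	 *   if ( pos >= 0 ) System.out.println( "found at " + pos );
+	 *   else System.out.println( "not found; insertion point is " + ( -pos - 1 ) ); // prints 2
+	 */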
+
+
+#if #keys(primitive)
+	/** The size of a digit used during radix sort (must be a power of 2). */
+	private static final int DIGIT_BITS = 8;
+	/** The mask to extract a digit of {@link #DIGIT_BITS} bits. */
+	private static final int DIGIT_MASK = ( 1 << DIGIT_BITS ) - 1;
+	/** The number of digits per element. */
+	private static final int DIGITS_PER_ELEMENT = KEY_CLASS.SIZE / DIGIT_BITS;
+
+	/** This method fixes negative numbers so that the combination exponent/significand is lexicographically sorted. */
+#if #keyclass(Double)
+	private static final long fixDouble( final double d ) {
+		final long l = Double.doubleToRawLongBits( d );
+		return l >= 0 ? l : l ^ 0x7FFFFFFFFFFFFFFFL;
+	}	   
+#elif #keyclass(Float)
+	private static final long fixFloat( final float f ) {
+		final long i = Float.floatToRawIntBits( f );
+		return i >= 0 ? i : i ^ 0x7FFFFFFF;
+	}
+#endif
+
+
+	/** Sorts the specified big array using radix sort.
+	 * 
+	 * <p>The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas
+	 * McIlroy, “Engineering radix sort”, <i>Computing Systems</i>, 6(1), pages 5−27 (1993),
+	 * and further improved using the digit-oracle idea described by
+	 * Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”,
+	 * <i>String Processing and Information Retrieval, 15th International Symposium</i>, volume 5280 of
+	 * Lecture Notes in Computer Science, pages 3−14, Springer (2008).
+	 *
+	 * <p>This implementation is significantly faster than quicksort 
+	 * even at fairly small sizes (say, more than 10000 elements), but it can only
+	 * sort in ascending order. 
+	 * It will allocate a support array of bytes with the same number of elements as the array to be sorted.
+	 * 
+	 * @param a the big array to be sorted.
+	 */
+	public static void radixSort( final KEY_TYPE[][] a ) {
+		radixSort( a, 0, BIG_ARRAYS.length( a ) );
+	}
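+	/* A minimal sketch, assuming the generated IntBigArrays specialization: as documented above,
+	 * this radix sort only sorts in ascending order and allocates a support big array of bytes
+	 * as large as the input, trading memory for speed on large inputs.
+	 *
+	 *   import it.unimi.dsi.fastutil.ints.IntBigArrays;
+	 *   import java.util.Random;
+	 *
+	 *   int[][] a = IntBigArrays.newBigArray( 1L << 22 );
+	 *   Random r = new Random( 0 );
+	 *   for( long i = IntBigArrays.length( a ); i-- != 0; ) IntBigArrays.set( a, i, r.nextInt() );
+	 *   IntBigArrays.radixSort( a ); // ascending order only
+	 */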
+
+	/** Sorts the specified big array using radix sort.
+	 * 
+	 * <p>The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas
+	 * McIlroy, “Engineering radix sort”, <i>Computing Systems</i>, 6(1), pages 5−27 (1993),
+	 * and further improved using the digit-oracle idea described by
+	 * Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”,
+	 * <i>String Processing and Information Retrieval, 15th International Symposium</i>, volume 5280 of
+	 * Lecture Notes in Computer Science, pages 3−14, Springer (2008).
+	 *
+	 * <p>This implementation is significantly faster than quicksort 
+	 * even at fairly small sizes (say, more than 10000 elements), but it can only
+	 * sort in ascending order. 
+	 * It will allocate a support array of bytes with the same number of elements as the array to be sorted.
+	 * 
+	 * @param a the big array to be sorted.
+	 * @param from the index of the first element (inclusive) to be sorted.
+	 * @param to the index of the last element (exclusive) to be sorted.
+	 */
+	public static void radixSort( final KEY_TYPE[][] a, final long from, final long to ) {
+		final int maxLevel = DIGITS_PER_ELEMENT - 1;
+
+		final int stackSize = ( ( 1 << DIGIT_BITS ) - 1 ) * ( DIGITS_PER_ELEMENT - 1 ) + 1;
+		final long[] offsetStack = new long[ stackSize ];
+		int offsetPos = 0;
+		final long[] lengthStack = new long[ stackSize ];
+		int lengthPos = 0;
+		final int[] levelStack = new int[ stackSize ];
+		int levelPos = 0;
+		
+		offsetStack[ offsetPos++ ] = from;
+		lengthStack[ lengthPos++ ] = to - from;
+		levelStack[ levelPos++ ] = 0;
+
+		final long[] count = new long[ 1 << DIGIT_BITS ];
+		final long[] pos = new long[ 1 << DIGIT_BITS ];
+		final byte[][] digit = ByteBigArrays.newBigArray( to - from );
+
+		while( offsetPos > 0 ) {
+			final long first = offsetStack[ --offsetPos ];
+			final long length = lengthStack[ --lengthPos ];
+			final int level = levelStack[ --levelPos ];
+#if #keyclass(Character)
+			final int signMask = 0;
+#else
+			final int signMask = level % DIGITS_PER_ELEMENT == 0 ? 1 << DIGIT_BITS - 1 : 0;
+#endif
+			
+			if ( length < MEDIUM ) {
+				selectionSort( a, first, first + length );
+				continue;
+			}
+			
+			final int shift = ( DIGITS_PER_ELEMENT - 1 - level % DIGITS_PER_ELEMENT ) * DIGIT_BITS; // This is the shift that extracts the right byte from a key
+
+			// Count keys.
+
+			for( long i = length; i-- != 0; ) ByteBigArrays.set( digit, i, (byte)( ( ( KEY2LEXINT( BIG_ARRAYS.get( a, first + i ) ) >>> shift ) & DIGIT_MASK ) ^ signMask  ));
+			for( long i = length; i-- != 0; ) count[ ByteBigArrays.get( digit, i ) & 0xFF ]++;
+			// Compute cumulative distribution and push non-singleton keys on stack.
+			int lastUsed = -1;
+			
+			long p = 0;
+			for( int i = 0; i < 1 << DIGIT_BITS; i++ ) {
+				if ( count[ i ] != 0 ) {
+					lastUsed = i;
+					if ( level < maxLevel && count[ i ] > 1 ){
+						//System.err.println( " Pushing " + new StackEntry( first + pos[ i - 1 ], first + pos[ i ], level + 1 ) );
+						offsetStack[ offsetPos++ ] = p + first;
+						lengthStack[ lengthPos++ ] = count[ i ];
+						levelStack[ levelPos++ ] = level + 1;
+					}
+				}
+				pos[ i ] = ( p += count[ i ] );
+			}
+			
+			// When all slots are OK, the last slot is necessarily OK.
+			final long end = length - count[ lastUsed ];
+			count[ lastUsed ] = 0;
+
+			// i moves through the start of each block
+			int c = -1;
+			for( long i = 0, d; i < end; i += count[ c ], count[ c ] = 0 ) {
+				KEY_TYPE t = BIG_ARRAYS.get( a, i + first );
+				c = ByteBigArrays.get( digit, i ) & 0xFF;
+				while( ( d = --pos[ c ] ) > i ) {
+					final KEY_TYPE z = t;
+					final int zz = c;
+					t = BIG_ARRAYS.get( a, d + first );
+					c = ByteBigArrays.get( digit, d ) & 0xFF;
+					BIG_ARRAYS.set( a, d + first, z );
+					ByteBigArrays.set( digit, d, (byte)zz );
+				}
+
+				BIG_ARRAYS.set( a, i + first, t );
+			}
+		}
+	}
+
+
+	private static void selectionSort( final KEY_TYPE[][] a, final KEY_TYPE[][] b, final long from, final long to ) {
+		for( long i = from; i < to - 1; i++ ) {
+			long m = i;
+			for( long j = i + 1; j < to; j++ ) 
+				if ( KEY_LESS( BIG_ARRAYS.get( a, j ), BIG_ARRAYS.get( a, m ) ) || KEY_CMP_EQ( BIG_ARRAYS.get( a, j ), BIG_ARRAYS.get( a, m ) ) && KEY_LESS( BIG_ARRAYS.get( b, j ), BIG_ARRAYS.get( b, m ) ) ) m = j;
+			
+			if ( m != i ) {
+				KEY_TYPE t = BIG_ARRAYS.get( a, i );
+				BIG_ARRAYS.set( a, i, BIG_ARRAYS.get( a, m ) );
+				BIG_ARRAYS.set( a, m, t );
+				t = BIG_ARRAYS.get( b, i );
+				BIG_ARRAYS.set( b, i, BIG_ARRAYS.get( b, m ) );
+				BIG_ARRAYS.set( b, m, t );
+			}
+		}
+	}
+
+	/** Sorts the specified pair of big arrays lexicographically using radix sort.
+	 * <p>The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas
+	 * McIlroy, “Engineering radix sort”, <i>Computing Systems</i>, 6(1), pages 5−27 (1993),
+	 * and further improved using the digit-oracle idea described by
+	 * Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”,
+	 * <i>String Processing and Information Retrieval, 15th International Symposium</i>, volume 5280 of
+	 * Lecture Notes in Computer Science, pages 3−14, Springer (2008).
+	 *
+	 * <p>This method implements a <em>lexicographical</em> sorting of the arguments. Pairs of elements
+	 * in the same position in the two provided arrays will be considered a single key, and permuted
+	 * accordingly. In the end, either <code>a[ i ] &lt; a[ i + 1 ]</code> or <code>a[ i ] == a[ i + 1 ]</code> and <code>b[ i ] &lt;= b[ i + 1 ]</code>.
+	 *
+	 * <p>This implementation is significantly faster than quicksort 
+	 * even at fairly small sizes (say, more than 10000 elements), but it can only
+	 * sort in ascending order. It will allocate a support array of bytes with the same number of elements as the arrays to be sorted.
+	 * 
+	 * @param a the first big array to be sorted.
+	 * @param b the second big array to be sorted.
+	 */
+
+	public static void radixSort( final KEY_TYPE[][] a, final KEY_TYPE[][] b ) {
+		radixSort( a, b, 0, BIG_ARRAYS.length( a ) );
+	}
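+	/* A small sketch of the lexicographic pair sort described above, assuming the generated
+	 * IntBigArrays specialization: elements at the same position of the two big arrays form a
+	 * single key and are permuted together.
+	 *
+	 *   int[][] key = IntBigArrays.wrap( new int[] { 2, 1, 2, 1 } );
+	 *   int[][] sub = IntBigArrays.wrap( new int[] { 9, 4, 3, 8 } );
+	 *   IntBigArrays.radixSort( key, sub );
+	 *   // key is now { 1, 1, 2, 2 } and sub is now { 4, 8, 3, 9 }
+	 */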
+	
+	/** Sorts the specified pair of big arrays lexicographically using radix sort.
+	 * 
+	 * <p>The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas
+	 * McIlroy, “Engineering radix sort”, <i>Computing Systems</i>, 6(1), pages 5−27 (1993),
+	 * and further improved using the digit-oracle idea described by
+	 * Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”,
+	 * <i>String Processing and Information Retrieval, 15th International Symposium</i>, volume 5280 of
+	 * Lecture Notes in Computer Science, pages 3−14, Springer (2008).
+	 *
+	 * <p>This method implements a <em>lexicographical</em> sorting of the arguments. Pairs of elements
+	 * in the same position in the two provided arrays will be considered a single key, and permuted
+	 * accordingly. In the end, either <code>a[ i ] &lt; a[ i + 1 ]</code> or <code>a[ i ] == a[ i + 1 ]</code> and <code>b[ i ] &lt;= b[ i + 1 ]</code>.
+	 *
+	 * <p>This implementation is significantly faster than quicksort 
+	 * even at fairly small sizes (say, more than 10000 elements), but it can only
+	 * sort in ascending order. It will allocate a support array of bytes with the same number of elements as the arrays to be sorted.
+	 * 
+	 * @param a the first big array to be sorted.
+	 * @param b the second big array to be sorted.
+	 * @param from the index of the first element (inclusive) to be sorted.
+	 * @param to the index of the last element (exclusive) to be sorted.
+	 */
+	public static void radixSort( final KEY_TYPE[][] a, final KEY_TYPE[][] b, final long from, final long to ) {
+		final int layers = 2;
+		if ( BIG_ARRAYS.length( a ) != BIG_ARRAYS.length( b ) ) throw new IllegalArgumentException( "Array size mismatch." );
+		final int maxLevel = DIGITS_PER_ELEMENT * layers - 1;
+		
+		final int stackSize = ( ( 1 << DIGIT_BITS ) - 1 ) * ( layers * DIGITS_PER_ELEMENT - 1 ) + 1;
+		final long[] offsetStack = new long[ stackSize ];
+		int offsetPos = 0;
+		final long[] lengthStack = new long[ stackSize ];
+		int lengthPos = 0;
+		final int[] levelStack = new int[ stackSize ];
+		int levelPos = 0;
+		
+		offsetStack[ offsetPos++ ] = from;
+		lengthStack[ lengthPos++ ] = to - from;
+		levelStack[ levelPos++ ] = 0;
+
+		final long[] count = new long[ 1 << DIGIT_BITS ];
+		final long[] pos = new long[ 1 << DIGIT_BITS ];
+		final byte[][] digit = ByteBigArrays.newBigArray( to - from );
+
+		while( offsetPos > 0 ) {
+			final long first = offsetStack[ --offsetPos ];
+			final long length = lengthStack[ --lengthPos ];
+			final int level = levelStack[ --levelPos ];
+#if #keyclass(Character)
+			final int signMask = 0;
+#else
+			final int signMask = level % DIGITS_PER_ELEMENT == 0 ? 1 << DIGIT_BITS - 1 : 0;
+#endif
+			
+			if ( length < MEDIUM ) {
+				selectionSort( a, b, first, first + length );
+				continue;
+			}
+			
+			final KEY_TYPE[][] k = level < DIGITS_PER_ELEMENT ? a : b; // This is the key array
+			final int shift = ( DIGITS_PER_ELEMENT - 1 - level % DIGITS_PER_ELEMENT ) * DIGIT_BITS; // This is the shift that extracts the right byte from a key
+
+			// Count keys.
+			for( long i = length; i-- != 0; ) ByteBigArrays.set( digit, i, (byte)( ( ( KEY2LEXINT( BIG_ARRAYS.get( k, first + i ) ) >>> shift ) & DIGIT_MASK ) ^ signMask ) );
+			for( long i = length; i-- != 0; ) count[ ByteBigArrays.get( digit, i ) & 0xFF ]++;
+			// Compute cumulative distribution and push non-singleton keys on stack.
+			int lastUsed = -1;
+
+			long p = 0;
+			for( int i = 0; i < 1 << DIGIT_BITS; i++ ) {
+				if ( count[ i ] != 0 ) {
+					lastUsed = i;
+					if ( level < maxLevel && count[ i ] > 1 ){
+						offsetStack[ offsetPos++ ] = p + first;
+						lengthStack[ lengthPos++ ] = count[ i ];
+						levelStack[ levelPos++ ] = level + 1;
+					}
+				}
+				pos[ i ] = ( p += count[ i ] );
+			}
+
+			// When all slots are OK, the last slot is necessarily OK.
+			final long end = length - count[ lastUsed ];
+			count[ lastUsed ] = 0;
+			
+			// i moves through the start of each block
+			int c = -1;
+			for( long i = 0, d; i < end; i += count[ c ], count[ c ] = 0 ) {
+				KEY_TYPE t = BIG_ARRAYS.get( a, i + first );
+				KEY_TYPE u = BIG_ARRAYS.get( b, i + first );
+				c = ByteBigArrays.get( digit, i ) & 0xFF;
+				while( ( d = --pos[ c ] ) > i ) {
+					KEY_TYPE z = t;
+					final int zz = c;
+					t = BIG_ARRAYS.get( a, d + first );
+					BIG_ARRAYS.set( a, d + first, z );
+					z = u;
+					u = BIG_ARRAYS.get( b, d + first );
+					BIG_ARRAYS.set( b, d + first, z );
+					c = ByteBigArrays.get( digit, d ) & 0xFF;
+					ByteBigArrays.set( digit, d, (byte)zz );
+				}
+
+				BIG_ARRAYS.set( a, i + first, t );
+				BIG_ARRAYS.set( b, i + first, u );
+			}
+		}
+	}
+
+#endif
+
+#endif
+
+
+	/** Shuffles the specified big array fragment using the specified pseudorandom number generator.
+	 * 
+	 * @param a the big array to be shuffled.
+	 * @param from the index of the first element (inclusive) to be shuffled.
+	 * @param to the index of the last element (exclusive) to be shuffled.
+	 * @param random a pseudorandom number generator (please use a <a href="http://dsiutils.dsi.unimi.it/docs/it/unimi/dsi/util/XorShiftStarRandom.html">XorShift*</a> generator).
+	 * @return <code>a</code>.
+	 */
+	public static KEY_GENERIC KEY_GENERIC_TYPE[][] shuffle( final KEY_GENERIC_TYPE[][] a, final long from, final long to, final Random random ) {
+		for( long i = to - from; i-- != 0; ) {
+			final long p = ( random.nextLong() & 0x7FFFFFFFFFFFFFFFL ) % ( i + 1 ); 
+			final KEY_GENERIC_TYPE t = get( a, from + i );
+			set( a, from + i, get( a, from + p ) );
+			set( a, from + p, t );
+		}
+		return a;
+	}
+
+	/** Shuffles the specified big array using the specified pseudorandom number generator.
+	 * 
+	 * @param a the big array to be shuffled.
+	 * @param random a pseudorandom number generator (please use a <a href="http://dsiutils.dsi.unimi.it/docs/it/unimi/dsi/util/XorShiftStarRandom.html">XorShift*</a> generator).
+	 * @return <code>a</code>.
+	 */
+	public static KEY_GENERIC KEY_GENERIC_TYPE[][] shuffle( final KEY_GENERIC_TYPE[][] a, final Random random ) {
+		for( long i = length( a ); i-- != 0; ) {
+			final long p = ( random.nextLong() & 0x7FFFFFFFFFFFFFFFL ) % ( i + 1 ); 
+			final KEY_GENERIC_TYPE t = get( a, i );
+			set( a, i, get( a, p ) );
+			set( a, p, t );
+		}
+		return a;
+	}
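+	/* A minimal sketch, assuming the generated IntBigArrays specialization: the methods above
+	 * perform a Fisher-Yates shuffle of the whole big array or of a range, driven by the
+	 * supplied pseudorandom number generator.
+	 *
+	 *   import it.unimi.dsi.fastutil.ints.IntBigArrays;
+	 *   import java.util.Random;
+	 *
+	 *   int[][] a = IntBigArrays.wrap( new int[] { 0, 1, 2, 3, 4, 5, 6, 7 } );
+	 *   IntBigArrays.shuffle( a, new Random( 42 ) );       // shuffle the whole big array
+	 *   IntBigArrays.shuffle( a, 2, 6, new Random( 42 ) ); // shuffle only the range [2..6)
+	 */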
+
+
+#if #keyclass(Integer)
+#ifdef TEST
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif #keyclass(Object)
+		return Integer.toBinaryString( r.nextInt() );
+#else 
+		return new java.io.Serializable() {};
+#endif
+	}
+
+	private static Object[] k, v, nk;
+	private static KEY_TYPE kt[];
+	private static KEY_TYPE nkt[];
+	private static BIG_ARRAY_BIG_LIST topList;
+
+	protected static void speedTest( int n, boolean b ) {}
+	
+	protected static void test( int n ) {
+		KEY_TYPE[][] a = BIG_ARRAYS.newBigArray( n );
+		for( int i = 0; i < n; i++ ) set( a, i, i );
+		BIG_ARRAYS.copy( a, 0, a, 1, n - 2 );
+		assert a[ 0 ][ 0 ] == 0;
+		for( int i = 0; i < n - 2; i++ ) assert get( a, i + 1 ) == i;
+
+		for( int i = 0; i < n; i++ ) set( a, i, i );
+		BIG_ARRAYS.copy( a, 1, a, 0, n - 1 );
+		for( int i = 0; i < n - 1; i++ ) assert get( a, i ) == i + 1;
+
+		for( int i = 0; i < n; i++ ) set( a, i, i );
+		KEY_TYPE[] b = new KEY_TYPE[ n ];
+		for( int i = 0; i < n; i++ ) b[ i ] = i;
+		
+		assert equals( wrap( b ), a );
+
+		System.out.println("Test OK");
+		return;
+
+	}
+
+
+	public static void main( String args[] ) {
+		int n  = Integer.parseInt(args[1]);
+		if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) );
+
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+
+#endif
+#endif
+
+}
diff --git a/drv/BigList.drv b/drv/BigList.drv
new file mode 100644
index 0000000..5713ff3
--- /dev/null
+++ b/drv/BigList.drv
@@ -0,0 +1,162 @@
+/*		 
+ * Copyright (C) 2010-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.List;
+import it.unimi.dsi.fastutil.BigList;
+
+#if ! #keyclass(Reference)
+
+/** A type-specific {@link BigList}; provides some additional methods that use polymorphism to avoid (un)boxing. 
+ *
+ * <P>Additionally, this interface strengthens {@link #iterator()}, {@link #listIterator()},
+ * {@link #listIterator(long)} and {@link #subList(long,long)}.
+ *
+ * <P>Besides polymorphic methods, this interface specifies methods to copy into a big array or remove contiguous
+ * sublists. Although the abstract implementation of this interface provides simple, one-by-one implementations
+ * of these methods, it is expected that concrete implementations will override them with optimized versions.
+ *
+ * @see List
+ */
+
+public interface BIG_LIST KEY_GENERIC extends BigList<KEY_GENERIC_CLASS>, COLLECTION KEY_GENERIC, Comparable<BigList<? extends KEY_GENERIC_CLASS>> {
+#else
+
+/** A type-specific {@link BigList}; provides some additional methods that use polymorphism to avoid (un)boxing. 
+ *
+ * <P>Additionally, this interface strengthens {@link #iterator()}, {@link #listIterator()},
+ * {@link #listIterator(long)} and {@link #subList(long,long)}.
+ *
+ * <P>Besides polymorphic methods, this interface specifies methods to copy into a big array or remove contiguous
+ * sublists. Although the abstract implementation of this interface provides simple, one-by-one implementations
+ * of these methods, it is expected that concrete implementations will override them with optimized versions.
+ *
+ * @see List
+ */
+
+public interface BIG_LIST KEY_GENERIC extends BigList<KEY_GENERIC_CLASS>, COLLECTION KEY_GENERIC {
+#endif
+
+	/** Returns a type-specific big-list iterator on this type-specific big list.
+	 *
+	 * @see List#iterator()
+	 */
+	KEY_BIG_LIST_ITERATOR KEY_GENERIC iterator();
+
+	/** Returns a type-specific big-list iterator on this type-specific big list.
+	 *
+	 * @see List#listIterator()
+	 */
+	KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator();
+
+	/** Returns a type-specific list iterator on this type-specific big list starting at a given index.
+	 *
+	 * @see BigList#listIterator(long)
+	 */
+	KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( long index );
+
+	/** Returns a type-specific view of the portion of this type-specific big list from the index <code>from</code>, inclusive, to the index <code>to</code>, exclusive.
+	 *
+	 * <P>Note that this specification strengthens the one given in {@link BigList#subList(long,long)}.
+	 *
+	 * @see BigList#subList(long,long)
+	 */
+	BIG_LIST KEY_GENERIC subList( long from, long to );
+
+	/** Copies (hopefully quickly) elements of this type-specific big list into the given big array.
+	 *
+	 * @param from the start index (inclusive).
+	 * @param a the destination big array.
+	 * @param offset the offset into the destination big array where to store the first element copied.
+	 * @param length the number of elements to be copied.
+	 */
+	void getElements( long from, KEY_TYPE a[][], long offset, long length );
+
+	/** Removes (hopefully quickly) elements of this type-specific big list.
+	 *
+	 * @param from the start index (inclusive).
+	 * @param to the end index (exclusive).
+	 */
+	void removeElements( long from, long to );
+
+	/** Adds (hopefully quickly) elements to this type-specific big list.
+	 *
+	 * @param index the index at which to add elements.
+	 * @param a the big array containing the elements.
+	 */
+	void addElements( long index, KEY_GENERIC_TYPE a[][] );
+
+	/** Adds (hopefully quickly) elements to this type-specific big list.
+	 *
+	 * @param index the index at which to add elements.
+	 * @param a the big array containing the elements.
+	 * @param offset the offset of the first element to add.
+	 * @param length the number of elements to add.
+	 */
+	void addElements( long index, KEY_GENERIC_TYPE a[][], long offset, long length );
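+
+	/* A hypothetical usage sketch of this interface through its int specialization, assuming the
+	 * generated classes IntBigList, IntBigArrayBigList and IntBigArrays: the type-specific methods
+	 * avoid (un)boxing, and the bulk methods above move contiguous ranges in one call.
+	 *
+	 *   import it.unimi.dsi.fastutil.ints.*;
+	 *
+	 *   IntBigList l = new IntBigArrayBigList();
+	 *   for( int i = 0; i < 100; i++ ) l.add( i );  // unboxed add
+	 *   int x = l.getInt( 42 );                     // unboxed positional access
+	 *   int[][] dst = IntBigArrays.newBigArray( 10 );
+	 *   l.getElements( 50, dst, 0, 10 );            // bulk copy into a big array
+	 *   l.removeElements( 0, 50 );                  // remove a contiguous sublist
+	 */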
+
+#if #keys(primitive)
+
+	/**
+	 * @see List#add(int,Object)
+	 */
+	void add( long index, KEY_TYPE key );
+
+	/**
+	 * @see List#addAll(int,java.util.Collection)
+	 */
+	boolean addAll( long index, COLLECTION c );
+
+	/**
+	 * @see List#addAll(int,java.util.Collection)
+	 */
+	boolean addAll( long index, BIG_LIST c );
+
+	/**
+	 * @see List#addAll(int,java.util.Collection)
+	 */
+	boolean addAll( BIG_LIST c );
+
+	/**
+	 * @see BigList#get(long)
+	 */
+	KEY_TYPE GET_KEY( long index );
+
+	/**
+	 * @see BigList#indexOf(Object)
+	 */
+	long indexOf( KEY_TYPE k );
+
+	/**
+	 * @see BigList#lastIndexOf(Object)
+	 */
+	long lastIndexOf( KEY_TYPE k );
+
+	/**
+	 * @see BigList#remove(long)
+	 */
+	KEY_TYPE REMOVE_KEY( long index );
+
+	/**
+	 * @see BigList#set(long,Object)
+	 */
+	KEY_TYPE set( long index, KEY_TYPE k );
+
+#endif
+
+}
\ No newline at end of file
diff --git a/drv/BigListIterator.drv b/drv/BigListIterator.drv
new file mode 100644
index 0000000..bb7f043
--- /dev/null
+++ b/drv/BigListIterator.drv
@@ -0,0 +1,36 @@
+/*		 
+ * Copyright (C) 2010-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.BigListIterator;
+
+/** A type-specific {@link BigListIterator}.
+ *
+ * <p>This interface adds a skipping method that takes longs.
+ *
+ * @see BigListIterator
+ */
+
+public interface KEY_BIG_LIST_ITERATOR KEY_GENERIC extends KEY_BIDI_ITERATOR KEY_GENERIC, BigListIterator<KEY_GENERIC_CLASS> {
+#if #keys(primitive)
+	void set( KEY_TYPE k );
+	void add( KEY_TYPE k );
+#endif
+	void set( KEY_GENERIC_CLASS k );
+	void add( KEY_GENERIC_CLASS k );
+}
diff --git a/drv/BigListIterators.drv b/drv/BigListIterators.drv
new file mode 100644
index 0000000..80e6463
--- /dev/null
+++ b/drv/BigListIterators.drv
@@ -0,0 +1,176 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+
+/** A class providing static methods and objects that do useful things with type-specific iterators.
+ *
+ * @see Iterator
+ */
+
+public class BIG_LIST_ITERATORS {
+
+	private BIG_LIST_ITERATORS() {}
+	
+	/** A class implementing the type-specific big-list iterator interface and returning no elements.
+	 *
+	 * <P>This class may be useful as a base class when you implement your own
+	 * type-specific big-list iterator.
+	 */
+
+	public static class EmptyBigListIterator KEY_GENERIC extends KEY_ABSTRACT_BIG_LIST_ITERATOR KEY_GENERIC implements java.io.Serializable, Cloneable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected EmptyBigListIterator() {}
+
+		public boolean hasNext() { return false; }
+		public boolean hasPrevious() { return false; }
+		public KEY_GENERIC_TYPE NEXT_KEY() { throw new NoSuchElementException(); }
+		public KEY_GENERIC_TYPE PREV_KEY() { throw new NoSuchElementException(); }
+		public long nextIndex() { return 0; }
+		public long previousIndex() { return -1; }
+		public long skip( long n ) { return 0; };
+		public long back( long n ) { return 0; };
+		public Object clone() { return EMPTY_BIG_LIST_ITERATOR; }
+        private Object readResolve() { return EMPTY_BIG_LIST_ITERATOR; }
+	}
+
+	/** An empty iterator (immutable). It is serializable and cloneable.
+	 *
+	 * <P>The class of this object represents an abstract empty iterator
+	 * that can iterate as a type-specific (list) iterator.
+	 */
+
+	@SuppressWarnings("rawtypes")
+	public final static EmptyBigListIterator EMPTY_BIG_LIST_ITERATOR = new EmptyBigListIterator();
+
+
+	/** An iterator returning a single element. */
+
+	private static class SingletonBigListIterator KEY_GENERIC extends KEY_ABSTRACT_BIG_LIST_ITERATOR KEY_GENERIC {
+		private final KEY_GENERIC_TYPE element;
+		private int curr;
+
+		public SingletonBigListIterator( final KEY_GENERIC_TYPE element ) {
+			this.element = element;
+		}
+	
+		public boolean hasNext() { return curr == 0; }
+		public boolean hasPrevious() { return curr == 1; }
+
+		public KEY_GENERIC_TYPE NEXT_KEY() {
+			if ( ! hasNext() ) throw new NoSuchElementException();
+			curr = 1;
+			return element;
+		}
+
+		public KEY_GENERIC_TYPE PREV_KEY() {
+			if ( ! hasPrevious() ) throw new NoSuchElementException();
+			curr = 0;
+			return element;
+		}
+
+		public long nextIndex() {
+			return curr;
+		}
+
+		public long previousIndex() {
+			return curr - 1;
+		}
+	}
+
+
+	/** Returns an iterator that iterates just over the given element.
+	 *
+	 * @param element the only element to be returned by a type-specific list iterator.
+	 * @return  an iterator that iterates just over <code>element</code>.
+	 */
+	public static KEY_GENERIC KEY_BIG_LIST_ITERATOR KEY_GENERIC singleton( final KEY_GENERIC_TYPE element ) {
+		return new SingletonBigListIterator KEY_GENERIC( element );
+	}
+
+
+  	/** An unmodifiable wrapper class for big list iterators. */
+
+	public static class UnmodifiableBigListIterator KEY_GENERIC extends KEY_ABSTRACT_BIG_LIST_ITERATOR KEY_GENERIC {
+		final protected KEY_BIG_LIST_ITERATOR KEY_GENERIC i;
+
+		@SuppressWarnings("unchecked")
+		public UnmodifiableBigListIterator( final KEY_BIG_LIST_ITERATOR KEY_GENERIC i ) {
+			this.i = i;
+		}
+
+		public boolean hasNext() { return i.hasNext(); }
+		public boolean hasPrevious() { return i.hasPrevious(); }
+		public KEY_GENERIC_TYPE NEXT_KEY() { return i.NEXT_KEY(); }
+		public KEY_GENERIC_TYPE PREV_KEY() { return i.PREV_KEY(); }
+		public long nextIndex() { return i.nextIndex(); }
+		public long previousIndex() { return i.previousIndex(); }
+#if #keys(primitive)
+		public KEY_GENERIC_CLASS next() { return i.next(); }
+		public KEY_GENERIC_CLASS previous() { return i.previous(); }
+#endif
+	}
+
+	/** Returns an unmodifiable list iterator backed by the specified list iterator.
+	 *
+	 * @param i the list iterator to be wrapped in an unmodifiable list iterator.
+	 * @return an unmodifiable view of the specified list iterator.
+	 */
+	public static KEY_GENERIC KEY_BIG_LIST_ITERATOR KEY_GENERIC unmodifiable( final KEY_BIG_LIST_ITERATOR KEY_GENERIC i ) { return new UnmodifiableBigListIterator KEY_GENERIC( i ); }
+
+
+	/** A class exposing a list iterator as a big-list iterator. */
+
+	public static class BigListIteratorListIterator KEY_GENERIC extends KEY_ABSTRACT_BIG_LIST_ITERATOR KEY_GENERIC {
+		protected final KEY_LIST_ITERATOR KEY_GENERIC i;
+
+		protected BigListIteratorListIterator( final KEY_LIST_ITERATOR KEY_GENERIC i ) {
+			this.i = i;
+		}
+
+		private int intDisplacement( long n ) {
+			if ( n < Integer.MIN_VALUE || n > Integer.MAX_VALUE ) throw new IndexOutOfBoundsException( "This big iterator is restricted to 32-bit displacements" );
+			return (int)n;
+		}
+	
+		public void set( KEY_GENERIC_TYPE ok ) { i.set( ok ); }
+		public void add( KEY_GENERIC_TYPE ok ) { i.add( ok ); }
+		public int back( int n ) { return i.back( n ); }
+		public long back( long n ) { return i.back( intDisplacement( n ) ); }
+		public void remove() { i.remove(); }
+		public int skip( int n ) { return i.skip( n ); }
+		public long skip( long n ) { return i.skip( intDisplacement( n ) ); }
+		public boolean hasNext() { return i.hasNext(); }
+		public boolean hasPrevious() { return i.hasPrevious(); }
+		public KEY_GENERIC_TYPE NEXT_KEY() { return i.NEXT_KEY(); }
+		public KEY_GENERIC_TYPE PREV_KEY() { return i.PREV_KEY(); }
+		public long nextIndex() { return i.nextIndex(); }
+		public long previousIndex() { return i.previousIndex(); }
+	}
+
+	 /** Returns a big-list iterator backed by the specified list iterator.
+	  *
+	  * @param i the list iterator to be adapted to the big-list-iterator interface.
+	  * @return a big-list iterator backed by the specified list iterator.
+	  */
+	public static KEY_GENERIC KEY_BIG_LIST_ITERATOR KEY_GENERIC asBigListIterator( final KEY_LIST_ITERATOR KEY_GENERIC i ) { return new BigListIteratorListIterator KEY_GENERIC( i ); }
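+
+	/* A minimal sketch, assuming the generated classes IntArrayList, IntBigListIterator and
+	 * IntBigListIterators: a plain (32-bit) list iterator is adapted to the big-list-iterator
+	 * interface and then wrapped so that it cannot modify the underlying list.
+	 *
+	 *   import it.unimi.dsi.fastutil.ints.*;
+	 *
+	 *   IntArrayList l = IntArrayList.wrap( new int[] { 1, 2, 3 } );
+	 *   IntBigListIterator it = IntBigListIterators.asBigListIterator( l.listIterator() );
+	 *   IntBigListIterator readOnly = IntBigListIterators.unmodifiable( it );
+	 *   while( readOnly.hasNext() ) System.out.println( readOnly.nextInt() );
+	 */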
+}
diff --git a/drv/BigLists.drv b/drv/BigLists.drv
new file mode 100644
index 0000000..3505b7d
--- /dev/null
+++ b/drv/BigLists.drv
@@ -0,0 +1,879 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.BigList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Random;
+
+/** A class providing static methods and objects that do useful things with type-specific big lists.
+ *
+ * @see java.util.Collections
+ * @see it.unimi.dsi.fastutil.BigList
+ */
+
+public class BIG_LISTS {
+
+	private BIG_LISTS() {}
+
+	/** Shuffles the specified big list using the specified pseudorandom number generator.
+	 * 
+	 * @param l the big list to be shuffled.
+	 * @param random a pseudorandom number generator (please use a <a href="http://dsiutils.dsi.unimi.it/docs/it/unimi/dsi/util/XorShiftStarRandom.html">XorShift*</a> generator).
+	 * @return <code>l</code>.
+	 */
+	public static KEY_GENERIC BIG_LIST KEY_GENERIC shuffle( final BIG_LIST KEY_GENERIC l, final Random random ) {
+		for( long i = l.size64(); i-- != 0; ) {
+			final long p = ( random.nextLong() & 0x7FFFFFFFFFFFFFFFL ) % ( i + 1 ); 
+			final KEY_GENERIC_TYPE t = l.GET_KEY( i );
+			l.set( i, l.GET_KEY( p ) );
+			l.set( p, t );
+		}
+		return l;
+	}
+
+
+	/** An immutable class representing an empty type-specific big list.
+	 *
+	 * <P>This class may be useful as a base class when you implement your own
+	 * type-specific big list.
+	 */
+
+	public static class EmptyBigList KEY_GENERIC extends COLLECTIONS.EmptyCollection KEY_GENERIC implements BIG_LIST KEY_GENERIC, java.io.Serializable, Cloneable {
+		
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected EmptyBigList() {}
+
+		public void add( final long index, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } 
+		public boolean add( final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+		public KEY_GENERIC_TYPE REMOVE_KEY( long i ) { throw new UnsupportedOperationException(); }
+		public KEY_GENERIC_TYPE set( final long index, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+
+		public long indexOf( KEY_TYPE k ) { return -1; }
+		public long lastIndexOf( KEY_TYPE k ) { return -1; }
+
+		public boolean addAll( Collection<? extends KEY_GENERIC_CLASS> c ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( long i, Collection<? extends KEY_GENERIC_CLASS> c ) { throw new UnsupportedOperationException(); }
+		public boolean removeAll( Collection<?> c ) { throw new UnsupportedOperationException(); }
+
+		public KEY_GENERIC_CLASS get( long i ) { throw new IndexOutOfBoundsException(); }
+
+#if #keys(primitive)
+		public boolean addAll( COLLECTION c ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( BIG_LIST c ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( long i, COLLECTION c ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( long i, BIG_LIST c ) { throw new UnsupportedOperationException(); }
+
+		public void add( final long index, final KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); } 
+		public boolean add( final KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); }
+		public KEY_GENERIC_CLASS set( final long index, final KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); }
+		public KEY_GENERIC_TYPE GET_KEY( long i ) { throw new IndexOutOfBoundsException(); }
+
+		public KEY_GENERIC_CLASS remove( long k ) { throw new UnsupportedOperationException(); }
+
+		public long indexOf( Object k ) { return -1; }
+		public long lastIndexOf( Object k ) { return -1; }
+#else
+		public boolean remove( Object k ) { throw new UnsupportedOperationException(); }
+#endif
+	
+		@SuppressWarnings("unchecked")
+		public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator() { return BIG_LIST_ITERATORS.EMPTY_BIG_LIST_ITERATOR; }
+
+		@SuppressWarnings("unchecked")
+		public KEY_BIG_LIST_ITERATOR KEY_GENERIC iterator() { return BIG_LIST_ITERATORS.EMPTY_BIG_LIST_ITERATOR; }
+
+		@SuppressWarnings("unchecked")
+		public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( long i ) { if ( i == 0 ) return BIG_LIST_ITERATORS.EMPTY_BIG_LIST_ITERATOR; throw new IndexOutOfBoundsException( String.valueOf( i ) ); }
+
+		public BIG_LIST KEY_GENERIC subList( long from, long to ) { if ( from == 0 && to == 0 ) return this; throw new IndexOutOfBoundsException(); }
+
+		public void getElements( long from, KEY_TYPE[][] a, long offset, long length ) { BIG_ARRAYS.ensureOffsetLength( a, offset, length ); if ( from != 0 ) throw new IndexOutOfBoundsException(); }
+		public void removeElements( long from, long to ) { throw new UnsupportedOperationException(); }
+
+		public void addElements( long index, final KEY_GENERIC_TYPE a[][], long offset, long length ) { throw new UnsupportedOperationException(); }
+		public void addElements( long index, final KEY_GENERIC_TYPE a[][] ) { throw new UnsupportedOperationException(); }
+
+		public void size( long s )  { throw new UnsupportedOperationException(); }
+		public long size64() { return 0; }
+
+		public int compareTo( final BigList<? extends KEY_GENERIC_CLASS> o ) {
+			if ( o == this ) return 0;
+			return ((BigList<?>)o).isEmpty() ? 0 : -1;
+		}
+
+		private Object readResolve() { return EMPTY_BIG_LIST; }
+		public Object clone() { return EMPTY_BIG_LIST; }
+	}
+
+	/** An empty big list (immutable). It is serializable and cloneable. 
+	 *
+	 * <P>The class of this object represents an abstract empty big list
+	 * that is a sublist of any type of big list. Thus, {@link #EMPTY_BIG_LIST}
+	 * may be assigned to a variable of any type-specific big list.
+	 */
+
+	@SuppressWarnings("rawtypes")
+	public static final EmptyBigList EMPTY_BIG_LIST = new EmptyBigList();
+
+
+
+	/** An immutable class representing a type-specific singleton big list. 
+	 *
+	 * <P>This class may be useful as a base class when you implement your own
+	 * type-specific big list.
+	 */
+
+	public static class Singleton KEY_GENERIC extends ABSTRACT_BIG_LIST KEY_GENERIC implements java.io.Serializable, Cloneable {
+	
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		private final KEY_GENERIC_TYPE element;
+	
+		private Singleton( final KEY_GENERIC_TYPE element ) {
+			this.element = element;
+		}
+	
+		public KEY_GENERIC_TYPE GET_KEY( final long i ) { if ( i == 0 ) return element; throw new IndexOutOfBoundsException(); }
+		public KEY_GENERIC_TYPE REMOVE_KEY( final long i ) { throw new UnsupportedOperationException(); }
+		public boolean contains( final KEY_TYPE k ) { return KEY_EQUALS( k, element ); }
+	
+		public boolean addAll( final Collection<? extends KEY_GENERIC_CLASS> c ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( final long i, final Collection <? extends KEY_GENERIC_CLASS> c ) { throw new UnsupportedOperationException(); }
+		public boolean removeAll( final Collection<?> c ) { throw new UnsupportedOperationException(); }
+		public boolean retainAll( final Collection<?> c ) { throw new UnsupportedOperationException(); }
+	
+		/* Slightly optimized w.r.t. the one in ABSTRACT_SET. */
+	
+		public KEY_TYPE[] TO_KEY_ARRAY() {
+			KEY_TYPE a[] = new KEY_TYPE[ 1 ];
+			a[ 0 ] = element;
+			return a;
+		}
+	
+		@SuppressWarnings("unchecked")
+		public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator() { return BIG_LIST_ITERATORS.singleton( element ); }
+
+		public KEY_BIG_LIST_ITERATOR KEY_GENERIC iterator() { return listIterator(); }
+
+		public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( long i ) { 
+			if ( i > 1 || i < 0 ) throw new  IndexOutOfBoundsException();
+			KEY_BIG_LIST_ITERATOR KEY_GENERIC l = listIterator();
+			if ( i == 1 ) l.next();
+			return l;
+		}
+
+		@SuppressWarnings("unchecked")
+		public BIG_LIST KEY_GENERIC subList( final long from, final long to ) {
+			ensureIndex( from );
+			ensureIndex( to );
+			if ( from > to ) throw new IndexOutOfBoundsException( "Start index (" + from + ") is greater than end index (" + to + ")" );
+			
+			if ( from != 0 || to != 1 ) return EMPTY_BIG_LIST;
+			return this;
+		}
+		
+		@Deprecated
+		public int size() { return 1; }
+		public long size64() { return 1; }
+		public void size( final long size ) { throw new UnsupportedOperationException(); }
+		public void clear() { throw new UnsupportedOperationException(); }
+	
+		public Object clone() { return this; }
+
+#if #keys(primitive)
+		public boolean rem( final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( final COLLECTION c ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( final long i, final COLLECTION c ) { throw new UnsupportedOperationException(); }
+#else
+		public boolean remove( Object k ) { throw new UnsupportedOperationException(); }
+#endif
+
+	}
+
+	/** Returns a type-specific immutable big list containing only the specified element. The returned big list is serializable and cloneable.
+	 *
+	 * @param element the only element of the returned big list.
+	 * @return a type-specific immutable big list containing just <code>element</code>.
+	 */
+
+	public static KEY_GENERIC BIG_LIST KEY_GENERIC singleton( final KEY_GENERIC_TYPE element ) { return new Singleton KEY_GENERIC( element ); }
+
+#if ! #keys(reference)
+
+	/** Returns a type-specific immutable big list containing only the specified element. The returned big list is serializable and cloneable.
+	 *
+	 * @param element the only element of the returned big list.
+	 * @return a type-specific immutable big list containing just <code>element</code>.
+	 */
+
+	public static KEY_GENERIC BIG_LIST KEY_GENERIC singleton( final Object element ) { return new Singleton KEY_GENERIC( KEY_OBJ2TYPE( element ) ); }
+
+#endif
+
+
+	/** A synchronized wrapper class for big lists. */
+
+	public static class SynchronizedBigList KEY_GENERIC extends COLLECTIONS.SynchronizedCollection KEY_GENERIC implements BIG_LIST KEY_GENERIC, java.io.Serializable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected final BIG_LIST KEY_GENERIC list; // Due to the large number of methods that are not in COLLECTION, this is worth caching.
+
+		protected SynchronizedBigList( final BIG_LIST KEY_GENERIC l, final Object sync ) {
+			super( l, sync );
+			this.list = l;
+		}
+
+		protected SynchronizedBigList( final BIG_LIST KEY_GENERIC l ) {
+			super( l );
+			this.list = l;
+		}
+
+		public KEY_GENERIC_TYPE GET_KEY( final long i ) { synchronized( sync ) { return list.GET_KEY( i ); } }
+		public KEY_GENERIC_TYPE set( final long i, final KEY_GENERIC_TYPE k ) { synchronized( sync ) { return list.set( i, k ); } }
+		public void add( final long i, final KEY_GENERIC_TYPE k ) { synchronized( sync ) { list.add( i, k ); } }
+		public KEY_GENERIC_TYPE REMOVE_KEY( final long i ) { synchronized( sync ) { return list.REMOVE_KEY( i ); } }
+
+		public long indexOf( final KEY_TYPE k ) { synchronized( sync ) { return list.indexOf( k ); } }
+		public long lastIndexOf( final KEY_TYPE k ) { synchronized( sync ) { return list.lastIndexOf( k ); } }
+
+		public boolean addAll( final long index, final Collection<? extends KEY_GENERIC_CLASS> c ) { synchronized( sync ) { return list.addAll( index, c ); } }
+
+		public void getElements( final long from, final KEY_TYPE a[][], final long offset, final long length ) { synchronized( sync ) { list.getElements( from, a, offset, length ); } }
+		public void removeElements( final long from, final long to ) { synchronized( sync ) { list.removeElements( from, to ); } }
+		public void addElements( long index, final KEY_GENERIC_TYPE a[][], long offset, long length ) { synchronized( sync ) { list.addElements( index, a, offset, length ); } }
+		public void addElements( long index, final KEY_GENERIC_TYPE a[][] ) { synchronized( sync ) { list.addElements( index, a ); } }
+		public void size( final long size ) { synchronized( sync ) { list.size( size ); } }
+		public long size64() { synchronized( sync ) { return list.size64(); } }
+
+		public KEY_BIG_LIST_ITERATOR KEY_GENERIC iterator() { return list.listIterator(); }
+		public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator() { return list.listIterator(); }
+		public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( final long i ) { return list.listIterator( i ); }
+
+		public BIG_LIST KEY_GENERIC subList( final long from, final long to ) { synchronized( sync ) { return synchronize( list.subList( from, to ), sync ); } }
+
+		public boolean equals( final Object o ) { synchronized( sync ) { return list.equals( o ); } }
+		public int hashCode() { synchronized( sync ) { return list.hashCode(); } }
+
+#if ! #keyclass(Reference)
+		public int compareTo( final BigList<? extends KEY_GENERIC_CLASS> o ) { synchronized( sync ) { return list.compareTo( o ); } }
+#endif
+
+#if #keys(primitive)
+		public boolean addAll( final long index, final COLLECTION c ) { synchronized( sync ) { return list.addAll( index, c ); } }
+		public boolean addAll( final long index, BIG_LIST l ) { synchronized( sync ) { return list.addAll( index, l ); } }
+		public boolean addAll( BIG_LIST l ) { synchronized( sync ) { return list.addAll( l ); } }
+
+		public KEY_GENERIC_CLASS get( final long i ) { synchronized( sync ) { return list.get( i ); } }
+		public void add( final long i, KEY_GENERIC_CLASS k ) { synchronized( sync ) { list.add( i, k ); } }
+		public KEY_GENERIC_CLASS set( final long index, KEY_GENERIC_CLASS k ) { synchronized( sync ) { return list.set( index, k ); } }
+		public KEY_GENERIC_CLASS remove( final long i ) { synchronized( sync ) { return list.remove( i ); } }
+		public long indexOf( final Object o ) { synchronized( sync ) { return list.indexOf( o ); } }
+		public long lastIndexOf( final Object o ) { synchronized( sync ) { return list.lastIndexOf( o ); } }
+#endif
+	}
+
+
+	/** Returns a synchronized type-specific big list backed by the given type-specific big list.
+	 *
+	 * @param l the big list to be wrapped in a synchronized big list.
+	 * @return a synchronized view of the specified big list.
+	 * @see java.util.Collections#synchronizedList(List)
+	 */
+	public static KEY_GENERIC BIG_LIST KEY_GENERIC synchronize( final BIG_LIST KEY_GENERIC l ) { return new SynchronizedBigList KEY_GENERIC( l ); }
+
+	/** Returns a synchronized type-specific big list backed by the given type-specific big list, using an assigned object to synchronize.
+	 *
+	 * @param l the big list to be wrapped in a synchronized big list.
+	 * @param sync an object that will be used to synchronize the access to the big list.
+	 * @return a synchronized view of the specified big list.
+	 * @see java.util.Collections#synchronizedList(List)
+	 */
+
+	public static KEY_GENERIC BIG_LIST KEY_GENERIC synchronize( final BIG_LIST KEY_GENERIC l, final Object sync ) { return new SynchronizedBigList KEY_GENERIC( l, sync ); }
+
+
+
+	/** An unmodifiable wrapper class for big lists. */
+
+	public static class UnmodifiableBigList KEY_GENERIC extends COLLECTIONS.UnmodifiableCollection KEY_GENERIC implements BIG_LIST KEY_GENERIC, java.io.Serializable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected final BIG_LIST KEY_GENERIC list; // Due to the large number of methods that are not in COLLECTION, this is worth caching.
+
+		protected UnmodifiableBigList( final BIG_LIST KEY_GENERIC l ) {
+			super( l );
+			this.list = l;
+		}
+
+		public KEY_GENERIC_TYPE GET_KEY( final long i ) { return list.GET_KEY( i ); }
+		public KEY_GENERIC_TYPE set( final long i, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+		public void add( final long i, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+		public KEY_GENERIC_TYPE REMOVE_KEY( final long i ) { throw new UnsupportedOperationException(); }
+
+		public long indexOf( final KEY_TYPE k ) { return list.indexOf( k ); }
+		public long lastIndexOf( final KEY_TYPE k ) { return list.lastIndexOf( k ); }
+
+		public boolean addAll( final long index, final Collection<? extends KEY_GENERIC_CLASS> c ) { throw new UnsupportedOperationException(); }
+
+		public void getElements( final long from, final KEY_TYPE a[][], final long offset, final long length ) { list.getElements( from, a, offset, length ); }
+		public void removeElements( final long from, final long to ) { throw new UnsupportedOperationException(); }
+		public void addElements( long index, final KEY_GENERIC_TYPE a[][], long offset, long length ) { throw new UnsupportedOperationException(); }
+		public void addElements( long index, final KEY_GENERIC_TYPE a[][] ) { throw new UnsupportedOperationException(); }
+		public void size( final long size ) { list.size( size ); }
+		public long size64() { return list.size64(); }
+
+		public KEY_BIG_LIST_ITERATOR KEY_GENERIC iterator() { return listIterator(); }
+		public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator() { return BIG_LIST_ITERATORS.unmodifiable( list.listIterator() ); }
+		public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( final long i ) { return BIG_LIST_ITERATORS.unmodifiable( list.listIterator( i ) ); }
+
+		public BIG_LIST KEY_GENERIC subList( final long from, final long to ) { return unmodifiable( list.subList( from, to ) ); }
+
+		public boolean equals( final Object o ) { return list.equals( o ); }
+		public int hashCode() { return list.hashCode(); }
+
+#if ! #keyclass(Reference)
+		public int compareTo( final BigList<? extends KEY_GENERIC_CLASS> o ) { return list.compareTo( o ); }
+#endif
+
+#if #keys(primitive)
+		public boolean addAll( final long index, final COLLECTION c ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( final BIG_LIST l ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( final long index, final BIG_LIST l ) { throw new UnsupportedOperationException(); }
+		public KEY_GENERIC_CLASS get( final long i ) { return list.get( i ); }
+		public void add( final long i, KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); }
+		public KEY_GENERIC_CLASS set( final long index, KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); }
+		public KEY_GENERIC_CLASS remove( final long i ) { throw new UnsupportedOperationException(); }
+		public long indexOf( final Object o ) { return list.indexOf( o ); }
+		public long lastIndexOf( final Object o ) { return list.lastIndexOf( o ); }
+#endif
+	}
+
+
+	/** Returns an unmodifiable type-specific big list backed by the given type-specific big list.
+	 *
+	 * @param l the big list to be wrapped in an unmodifiable big list.
+	 * @return an unmodifiable view of the specified big list.
+	 * @see java.util.Collections#unmodifiableList(List)
+	 */
+	public static KEY_GENERIC BIG_LIST KEY_GENERIC unmodifiable( final BIG_LIST KEY_GENERIC l ) { return new UnmodifiableBigList KEY_GENERIC( l ); }
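+
+	/* A minimal usage sketch of the unmodifiable wrapper (assuming, purely for
+	 * illustration, the generated long instantiation):
+	 *
+	 *   LongBigList backing = new LongBigArrayBigList();
+	 *   backing.add( 1 );
+	 *   LongBigList view = LongBigLists.unmodifiable( backing );
+	 *   long first = view.getLong( 0 );   // reads delegate to the backing list
+	 *   // view.add( 2 ) would throw an UnsupportedOperationException
+	 */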
+
+	/** A class exposing a list as a big list. */
+
+	public static class ListBigList KEY_GENERIC extends ABSTRACT_BIG_LIST KEY_GENERIC implements java.io.Serializable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		private final LIST KEY_GENERIC list;
+	
+		protected ListBigList( final LIST KEY_GENERIC list ) {
+			this.list = list;
+		}
+	
+		private int intIndex( long index ) {
+			if ( index >= Integer.MAX_VALUE ) throw new IndexOutOfBoundsException( "This big list is restricted to 32-bit indices" );
+			return (int)index;
+		}
+	
+		public long size64() { return list.size(); }
+		@Deprecated
+		public int size() { return list.size(); }
+		public void size( final long size ) { list.size( intIndex( size ) ); }
+		public KEY_BIG_LIST_ITERATOR KEY_GENERIC iterator() { return BIG_LIST_ITERATORS.asBigListIterator( list.iterator() ); }
+		public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator() { return BIG_LIST_ITERATORS.asBigListIterator( list.listIterator() ); }
+		public boolean addAll( final long index, final Collection<? extends KEY_GENERIC_CLASS> c ) { return list.addAll( intIndex( index ), c ); }
+		public KEY_BIG_LIST_ITERATOR KEY_GENERIC listIterator( final long index ) { return BIG_LIST_ITERATORS.asBigListIterator( list.listIterator( intIndex( index ) ) ); }
+		public BIG_LIST KEY_GENERIC subList( long from, long to ) { return new ListBigList KEY_GENERIC( list.subList( intIndex( from ), intIndex( to ) ) ); }
+		public boolean contains( final KEY_TYPE key ) { return list.contains( key ); }
+		public KEY_TYPE[] TO_KEY_ARRAY() { return list.TO_KEY_ARRAY(); }
+		public void removeElements( final long from, final long to ) { list.removeElements( intIndex( from ), intIndex( to ) ); }
+#if #keys(primitive)
+		public KEY_TYPE[] TO_KEY_ARRAY( KEY_TYPE[] a ) { return list.TO_KEY_ARRAY( a ); }
+#endif
+		public void add( long index, KEY_GENERIC_TYPE key ) { list.add( intIndex( index ), key ); }
+		public boolean addAll( long index, COLLECTION KEY_GENERIC c ) { return list.addAll( intIndex( index ), c ); }
+		public boolean addAll( long index, BIG_LIST KEY_GENERIC c ) { return list.addAll( intIndex( index ), c ); }
+		public boolean add( KEY_GENERIC_TYPE key ) { return list.add( key ); }
+		public boolean addAll( BIG_LIST KEY_GENERIC c ) { return list.addAll( c ); }
+		public KEY_GENERIC_TYPE GET_KEY( long index ) { return list.GET_KEY( intIndex( index ) ); }
+		public long indexOf( KEY_TYPE k ) { return list.indexOf( k ); }
+		public long lastIndexOf( KEY_TYPE k ) { return list.lastIndexOf( k ); }
+		public KEY_GENERIC_TYPE REMOVE_KEY( long index ) { return list.REMOVE_KEY( intIndex( index ) ); }
+		public KEY_GENERIC_TYPE set( long index, KEY_GENERIC_TYPE k ) { return list.set( intIndex( index ), k ); }
+		public boolean addAll( COLLECTION KEY_GENERIC c ) { return list.addAll( c ); }
+		public boolean containsAll( COLLECTION KEY_GENERIC c ) { return list.containsAll( c ); }
+		public boolean removeAll( COLLECTION KEY_GENERIC c ) { return list.removeAll( c ); }
+		public boolean retainAll( COLLECTION KEY_GENERIC c ) { return list.retainAll( c ); }
+		public boolean isEmpty() { return list.isEmpty(); }
+		public <T> T[] toArray( T[] a ) { return list.toArray( a ); }
+		public boolean containsAll( Collection<?> c ) { return list.containsAll( c ); }
+		public boolean addAll( Collection<? extends KEY_GENERIC_CLASS> c ) { return list.addAll( c ); }
+		public boolean removeAll( Collection<?> c ) { return list.removeAll( c ); }
+		public boolean retainAll( Collection<?> c ) { return list.retainAll( c ); }
+		public void clear() { list.clear(); }
+		public int hashCode() { return list.hashCode(); }
+	}
+	
+	/** Returns a big list backed by the specified list.
+	*
+	* @param list a list.
+	* @return a big list backed by the specified list.
+	*/
+	public static KEY_GENERIC BIG_LIST KEY_GENERIC asBigList( final LIST KEY_GENERIC list ) { return new ListBigList KEY_GENERIC( list ); }
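+
+	/* A minimal usage sketch of the list-to-big-list adapter (assuming, purely for
+	 * illustration, the generated long instantiation, i.e. LongBigLists.asBigList()
+	 * wrapping a LongArrayList):
+	 *
+	 *   LongList small = new LongArrayList( new long[] { 1, 2, 3 } );
+	 *   LongBigList big = LongBigLists.asBigList( small );
+	 *   long size = big.size64();   // 3; the view remains restricted to 32-bit indices
+	 */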
+
+
+
+
+#ifdef TEST
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte ) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif #keyclass(Object)
+		return Integer.toBinaryString( r.nextInt() );
+#else
+		return new java.io.Serializable() {};
+#endif
+	}
+
+
+	private static void testLists( KEY_TYPE k, BIG_LIST m, BIG_LIST t, int level ) {
+		int n = 100;
+		int c;
+
+		long ms;
+		boolean mThrowsIllegal, tThrowsIllegal, mThrowsNoElement, tThrowsNoElement, mThrowsIndex, tThrowsIndex, mThrowsUnsupp, tThrowsUnsupp;
+		boolean rt = false, rm = false;
+		Object Rt = null, Rm = null;
+
+		if ( level == 0 ) return;
+
+		/* Now we check that m and t are equal. */
+		if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t);
+
+		ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" );
+		ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" );
+
+		/* Now we check that m actually holds that data. */
+		for(java.util.Iterator i=t.iterator(); i.hasNext();  ) {
+			ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on t)" );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for(java.util.Iterator i=m.listIterator(); i.hasNext();  ) {
+			ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on m)" );
+		}
+
+		/* Now we check that inquiries about random data give the same answer in m and t. For
+		   m we use the polymorphic method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+				
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				m.contains(T);
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+
+			try {
+				t.contains(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): contains() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): contains() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): contains() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex) ensure( m.contains(KEY2OBJ(T)) == t.contains(KEY2OBJ(T)), "Error (" + level + ", " + seed + "): divergence in keys between t and m (polymorphic method) " + m );
+		}
+
+		/* Again, we check that inquiries about random data give the same answer in m and t, but
+		   for m we use the standard method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				m.contains(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				t.contains(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): contains() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): contains() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): contains() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): contains() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( m.contains(KEY2OBJ(T)) ==  t.contains(KEY2OBJ(T)), "Error (" + level + ", " + seed + "): divergence between t and m (standard method) " + m );
+		}
+
+		/* Now we add and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				rm = m.add(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.add(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): add() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): add() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): add() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): add() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( rm == rt, "Error (" + level + ", " + seed + "): divergence in add() between t and m " + m );
+
+			T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				rm = m.remove(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.remove(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			if ( ! KEY_EQUALS( T, k ) && mThrowsUnsupp && ! tThrowsUnsupp ) mThrowsUnsupp = false; // Stupid bug in Collections.singleton()
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): remove() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): remove() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): remove() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): remove() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( rm == rt, "Error (" + level + ", " + seed + "): divergence in remove() between t and m " + m );
+		}
+
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after removal " + m );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after removal " + m );
+
+		/* Now we add and remove random data in m and t at specific positions, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			int pos = r.nextInt( 2 );
+
+			try {
+				m.add(pos, KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				t.add(pos, KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): add() at " + pos + " divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): add() at " + pos + " divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): add() at " + pos + " divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): add() at " + pos + " divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+
+			T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+			
+			pos = r.nextInt( 2 );
+
+			try {
+				Rm = m.remove(pos);
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				Rt = t.remove(pos);
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): remove() at " + pos + " divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): remove() at " + pos + " divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): remove() at " + pos + " divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): remove() at " + pos + " divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( Rm == Rt || Rm != null && Rm.equals(Rt), "Error (" + level + ", " + seed + "): divergence in remove() at " + pos + " between t and m " + m );
+		}
+
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after removal " + m );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after removal " + m );
+
+		/* Now we add and remove random collections in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				rm = m.addAll(java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.addAll(java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): addAll() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): addAll() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): addAll() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): addAll() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( rm == rt, "Error (" + level + ", " + seed + "): divergence in addAll() between t and m " + m );
+
+			T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				rm = m.removeAll(java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.removeAll(java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): removeAll() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): removeAll() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): removeAll() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): removeAll() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( rm == rt, "Error (" + level + ", " + seed + "): divergence in removeAll() between t and m " + m );
+		}
+
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after set removal " + m );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after set removal " + m );
+
+		/* Now we add random collections at specific positions in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			int pos = r.nextInt( 2 );
+
+			try {
+				rm = m.addAll(pos, java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.addAll(pos, java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): addAll() at " + pos + " divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): addAll() at " + pos + " divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): addAll() at " + pos + " divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): addAll() at " + pos + " divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( rm == rt, "Error (" + level + ", " + seed + "): divergence in addAll() at " + pos + " between t and m " + m );
+
+		}
+
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after set removal " + m );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after set removal " + m );
+
+		/* Now we check that m actually holds the same data. */
+		  
+		for(java.util.Iterator i=t.iterator(); i.hasNext();  ) {
+			ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after removal (iterating on t)");
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.listIterator(); i.hasNext();  ) {
+			ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after removal (iterating on m)" );
+		}
+		
+		if ( m instanceof Singleton ) {
+			ensure( m.equals( ((Singleton)m).clone() ), "Error (" + level + ", " + seed + "): m does not equal m.clone()" );
+			ensure( ((Singleton)m).clone().equals( m ), "Error (" + level + ", " + seed + "): m.clone() does not equal m" );
+		}
+
+		int h = m.hashCode();
+
+		/* Now we save and read m. */
+
+		BIG_LIST m2 = null;
+		  
+		try {
+			java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test");
+			java.io.OutputStream os = new java.io.FileOutputStream(ff);
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os);
+				
+			oos.writeObject(m);
+			oos.close();
+				
+			java.io.InputStream is = new java.io.FileInputStream(ff);
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is);
+				
+			m2 = (BIG_LIST)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch(Exception e) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+
+#if ! #keyclass(Reference)
+
+		ensure( m2.hashCode() == h, "Error (" + level + ", " + seed + "): hashCode() changed after save/read" );
+		  
+		/* Now we check that m2 actually holds that data. */
+		  
+		ensure( m2.equals(t), "Error (" + level + ", " + seed + "): ! m2.equals( t ) after save/read" );
+		ensure( t.equals(m2), "Error (" + level + ", " + seed + "): ! t.equals( m2 ) after save/read" );
+#endif
+
+		if ( ! m.isEmpty() ) {
+			long start = (r.nextLong() & 0x7FFFFFFFFFFFFFFFL) % m.size64();
+			long end = start + (r.nextLong() & 0x7FFFFFFFFFFFFFFFL) % ( m.size64() - start );
+			//System.err.println("Checking subList from " + start + " to " + end + " (level=" + (level+1) + ")..." );
+			testLists( k, m.subList( start, end ), t.subList( start, end ), level - 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + m + t + "): ! m.equals( t ) after subList" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after subList" );
+
+		}
+
+		return;
+	}
+
+	private static void test() {
+		KEY_TYPE k = genKey();
+		BIG_LIST m = new Singleton( k );
+		BIG_LIST u = BIG_LISTS.unmodifiable( BIG_LISTS.asBigList( LISTS.singleton( KEY2OBJ( k ) ) ) );
+		testLists( k, m, u, 3 );
+		System.out.println("Test OK");
+	}
+	
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition fp = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, fp ).toString();
+	}
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	/** This method expects a lower-cased type (e.g., "int") as its first argument
+	 * and, optionally, a seed as its second argument. */
+
+	public static void main( String arg[] ) throws Exception {
+		if ( arg.length > 1 ) r = new java.util.Random( seed = Long.parseLong( arg[ 1 ] ) );
+		  
+		try {
+			test();
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+	
+#endif
+	
+}
diff --git a/drv/BinIO.drv b/drv/BinIO.drv
new file mode 100644
index 0000000..6b9ed8e
--- /dev/null
+++ b/drv/BinIO.drv
@@ -0,0 +1,166 @@
+/*		 
+ * Copyright (C) 2005-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package it.unimi.dsi.fastutil.io;
+
+import static it.unimi.dsi.fastutil.BigArrays.SEGMENT_MASK;
+import static it.unimi.dsi.fastutil.BigArrays.start;
+import static it.unimi.dsi.fastutil.BigArrays.segment;
+import static it.unimi.dsi.fastutil.BigArrays.displacement;
+import java.io.*;
+import java.util.*;
+import it.unimi.dsi.fastutil.booleans.*;
+import it.unimi.dsi.fastutil.bytes.*;
+import it.unimi.dsi.fastutil.shorts.*;
+import it.unimi.dsi.fastutil.chars.*;
+import it.unimi.dsi.fastutil.ints.*;
+import it.unimi.dsi.fastutil.longs.*;
+import it.unimi.dsi.fastutil.floats.*;
+import it.unimi.dsi.fastutil.doubles.*;
+
+/** Provides static methods to perform binary I/O easily.
+ *
+ * <P>This class fills some gaps in the Java API. First of all, you have two
+ * buffered, easy-to-use methods to {@linkplain #storeObject(Object,CharSequence) store an object to a file}
+ * or {@linkplain #loadObject(CharSequence) load an object from a file},
+ * and two
+ * buffered, easy-to-use methods to {@linkplain #storeObject(Object,OutputStream) store an object to an output stream}
+ * or to {@linkplain #loadObject(InputStream) load an object from an input stream}.
+ *
+ * <p>Second, a natural operation on sequences of primitive elements is to load or
+ * store them in binary form using the {@link DataInput} conventions. This
+ * approach is much more flexible than storing arrays as objects, as it allows
+ * for partial loads and partial stores, and makes it easy to read the
+ * resulting files from other languages.
+ * 
+ * <P>For each primitive type, this class provides methods that read elements
+ * from a {@link DataInput} or from a filename into an array. Analogously, there are
+ * methods that store the content of an array (fragment) or the elements
+ * returned by an iterator to a {@link DataOutput} or to a given filename. Files
+ * are buffered using {@link FastBufferedInputStream} and {@link FastBufferedOutputStream}.
+ *
+ * <P>Since bytes can be read from or written to any stream, additional methods
+ * make it possible to {@linkplain #loadBytes(InputStream,byte[]) load bytes from} and
+ * {@linkplain #storeBytes(byte[],OutputStream) store bytes to} a stream. Such methods
+ * use the bulk read and write methods of {@link InputStream} and {@link OutputStream}, but they
+ * also include a workaround for <a href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6478546">bug #6478546</a>.
+ *
+ * <P>Finally, there are useful wrapper methods that {@linkplain #asIntIterator(CharSequence)
+ * exhibit a file as a type-specific iterator}.
+ * 
+ * @since 4.4
+ */
+
+public class BinIO {
+
+	private BinIO() {}
+
+	/** Stores an object in a file given by a {@link File} object.
+	 *
+	 * @param o an object.
+	 * @param file a file.
+	 * @see #loadObject(File)
+	 */
+	public static void storeObject( final Object o, final File file ) throws IOException {
+		final ObjectOutputStream oos = new ObjectOutputStream( new FastBufferedOutputStream( new FileOutputStream( file ) ) );
+		oos.writeObject( o );
+		oos.close();
+	}
+
+	/** Stores an object in a file given by a pathname.
+	 *
+	 * @param o an object.
+	 * @param filename a filename.
+	 * @see #loadObject(CharSequence)
+	 */
+
+	public static void storeObject( final Object o, final CharSequence filename ) throws IOException {
+		storeObject( o, new File( filename.toString() ) );
+	}
+
+	/** Loads an object from a file given by a {@link File} object.
+	 *
+	 * @param file a file.
+	 * @return the object stored under the given file.
+	 * @see #storeObject(Object, File)
+	 */
+	public static Object loadObject( final File file ) throws IOException, ClassNotFoundException {
+		final ObjectInputStream ois = new ObjectInputStream( new FastBufferedInputStream( new FileInputStream( file ) ) );
+		final Object result = ois.readObject();
+		ois.close();
+		return result;
+	}
+
+	/** Loads an object from a file given by a pathname.
+	 *
+	 * @param filename a filename.
+	 * @return the object stored under the given filename.
+	 * @see #storeObject(Object, CharSequence)
+	 */
+	public static Object loadObject( final CharSequence filename ) throws IOException, ClassNotFoundException {
+		return loadObject( new File( filename.toString() ) );
+	}
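+
+	/* A minimal round-trip sketch for the object methods above (the filename is
+	 * illustrative only; any serializable object will do):
+	 *
+	 *   int[] data = { 1, 2, 3 };
+	 *   BinIO.storeObject( data, "data.ser" );
+	 *   int[] copy = (int[])BinIO.loadObject( "data.ser" );
+	 */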
+
+	/** Stores an object in a given output stream.
+	 *
+	 * This method buffers <code>s</code>, and flushes all wrappers after
+	 * calling <code>writeObject()</code>, but does not close <code>s</code>.
+	 *
+	 * @param o an object.
+	 * @param s an output stream.
+	 * @see #loadObject(InputStream)
+	 */
+
+	public static void storeObject( final Object o, final OutputStream s ) throws IOException {
+		@SuppressWarnings("resource")
+		final ObjectOutputStream oos = new ObjectOutputStream( new FastBufferedOutputStream( s ) );
+		oos.writeObject( o );
+		oos.flush();
+	}
+
+	/** Loads an object from a given input stream.
+	 *
+	 * <p><STRONG>Warning</STRONG>: this method buffers the input stream. As a consequence,
+	 * subsequent reads from the same stream may not give the desired results, as bytes
+	 * may have been read by the internal buffer, but not used by <code>readObject()</code>.
+	 * This is a feature, as this method is targeted at one-shot reading from streams,
+	 * e.g., reading exactly one object from {@link System#in}.
+	 *
+	 * @param s an input stream.
+	 * @return the object read from the given input stream.
+	 * @see #storeObject(Object, OutputStream)
+	 */
+	public static Object loadObject( final InputStream s ) throws IOException, ClassNotFoundException {
+		@SuppressWarnings("resource")
+		final ObjectInputStream ois = new ObjectInputStream( new FastBufferedInputStream( s ) );
+		final Object result = ois.readObject();
+		return result;
+	}
+
+
+
+
+#include "src/it/unimi/dsi/fastutil/io/BooleanBinIOFragment.h"
+#include "src/it/unimi/dsi/fastutil/io/ByteBinIOFragment.h"
+#include "src/it/unimi/dsi/fastutil/io/ShortBinIOFragment.h"
+#include "src/it/unimi/dsi/fastutil/io/CharBinIOFragment.h"
+#include "src/it/unimi/dsi/fastutil/io/IntBinIOFragment.h"
+#include "src/it/unimi/dsi/fastutil/io/LongBinIOFragment.h"
+#include "src/it/unimi/dsi/fastutil/io/FloatBinIOFragment.h"
+#include "src/it/unimi/dsi/fastutil/io/DoubleBinIOFragment.h"
+
+}
diff --git a/drv/BinIOFragment.drv b/drv/BinIOFragment.drv
new file mode 100644
index 0000000..eecbc79
--- /dev/null
+++ b/drv/BinIOFragment.drv
@@ -0,0 +1,867 @@
+/*		 
+ * Copyright (C) 2004-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+
+#if #keyclass(Byte)
+
+// HORRIBLE kluges to work around bug #6478546
+
+private final static int MAX_IO_LENGTH = 1024 * 1024;
+
+private static int read( final InputStream is, final byte a[], final int offset, final int length ) throws IOException { 
+	if ( length == 0 ) return 0;
+
+	int read = 0, result;
+	do {
+		result = is.read( a, offset + read, Math.min( length - read, MAX_IO_LENGTH ) );
+		if ( result < 0 ) return read;
+		read += result;
+	} while( read < length );
+
+	return read;
+}
+
+private static void write( final OutputStream outputStream, final byte a[], final int offset, final int length ) throws IOException { 
+	int written = 0;
+	while( written < length ) { 
+		outputStream.write( a, offset + written, Math.min( length - written, MAX_IO_LENGTH ) );
+		written += Math.min( length - written, MAX_IO_LENGTH );
+	}
+}
+
+private static void write( final DataOutput dataOutput, final byte a[], final int offset, final int length ) throws IOException { 
+	int written = 0;
+	while( written < length ) { 
+		dataOutput.write( a, offset + written, Math.min( length - written, MAX_IO_LENGTH ) );
+		written += Math.min( length - written, MAX_IO_LENGTH );
+	}
+}
+
+// Additional read/write methods to work around the DataInput/DataOutput schizophrenia.
+
+/** Loads bytes from a given input stream, storing them in a given array fragment.
+ *
+ * <p>Note that this method is going to be significantly faster than {@link #loadBytes(DataInput,byte[],int,int)}
+ * as it uses {@link InputStream}'s bulk-read methods.
+ *
+ * @param inputStream an input stream.
+ * @param array an array which will be filled with data from <code>inputStream</code>.
+ * @param offset the index of the first element of <code>array</code> to be filled.
+ * @param length the number of elements of <code>array</code> to be filled.
+ * @return the number of elements actually read from <code>inputStream</code> (it might be less than <code>length</code> if <code>inputStream</code> ends).
+ */
+public static int LOAD_KEYS( final InputStream inputStream, final KEY_TYPE[] array, final int offset, final int length ) throws IOException {
+	return read( inputStream, array, offset, length );
+}
+
+/** Loads bytes from a given input stream, storing them in a given array.
+ *
+ * <p>Note that this method is going to be significantly faster than {@link #loadBytes(DataInput,byte[])}
+ * as it uses {@link InputStream}'s bulk-read methods.
+ *
+ * @param inputStream an input stream.
+ * @param array an array which will be filled with data from <code>inputStream</code>.
+ * @return the number of elements actually read from <code>inputStream</code> (it might be less than the array length if <code>inputStream</code> ends).
+ */
+public static int LOAD_KEYS( final InputStream inputStream, final KEY_TYPE[] array ) throws IOException {
+	return read( inputStream, array, 0, array.length );
+}
+
+/** Stores an array fragment to a given output stream.
+ *
+ * <p>Note that this method is going to be significantly faster than {@link #storeBytes(byte[],int,int,DataOutput)}
+ * as it uses {@link OutputStream}'s bulk-write methods.
+ *
+ * @param array an array whose elements will be written to <code>outputStream</code>.
+ * @param offset the index of the first element of <code>array</code> to be written.
+ * @param length the number of elements of <code>array</code> to be written.
+ * @param outputStream an output stream.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[], final int offset, final int length, final OutputStream outputStream ) throws IOException {
+	write( outputStream, array, offset, length );
+}
+
+/** Stores an array to a given output stream.
+ *
+ * <p>Note that this method is going to be significantly faster than {@link #storeBytes(byte[],DataOutput)}
+ * as it uses {@link OutputStream}'s bulk-write methods.
+ *
+ * @param array an array whose elements will be written to <code>outputStream</code>.
+ * @param outputStream an output stream.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[], final OutputStream outputStream ) throws IOException {
+	write( outputStream, array, 0, array.length );
+}
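+
+/* A minimal usage sketch for the stream-oriented methods above (generated as
+ * loadBytes()/storeBytes(); the standard streams are used purely for illustration):
+ *
+ *   byte[] buffer = new byte[ 1024 ];
+ *   int read = BinIO.loadBytes( System.in, buffer );   // bulk read, at most buffer.length bytes
+ *   BinIO.storeBytes( buffer, 0, read, System.out );   // bulk write of the fragment just read
+ */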
+
+
+private static long read( final InputStream is, final byte a[][], final long offset, final long length ) throws IOException { 
+	if ( length == 0 ) return 0;
+
+	long read = 0;
+	int segment = segment( offset );
+	int displacement = displacement( offset );
+	int result;
+	do {
+		result = is.read( a[ segment ], displacement, (int)Math.min( a[ segment ].length - displacement, Math.min( length - read, MAX_IO_LENGTH ) ) );
+		if ( result < 0 ) return read;
+		read += result;
+		displacement += result;
+		if ( displacement == a[ segment ].length ) {
+			segment++;
+			displacement = 0;
+		}
+	} while( read < length );
+
+	return read;
+}
+
+private static void write( final OutputStream outputStream, final byte a[][], final long offset, final long length ) throws IOException { 
+	if ( length == 0 ) return;
+	long written = 0;
+	int toWrite;
+	int segment = segment( offset );
+	int displacement = displacement( offset );
+	do {
+		toWrite = (int)Math.min( a[ segment ].length - displacement, Math.min( length - written, MAX_IO_LENGTH ) );
+		outputStream.write( a[ segment ], displacement, toWrite );
+		written += toWrite;
+		displacement += toWrite;
+		if ( displacement == a[ segment ].length ) {
+			segment++;
+			displacement = 0;
+		}
+	} while( written < length );
+}
+
+private static void write( final DataOutput dataOutput, final byte a[][], final long offset, final long length ) throws IOException { 
+	if ( length == 0 ) return;
+	long written = 0;
+	int toWrite;
+	int segment = segment( offset );
+	int displacement = displacement( offset );
+	do {
+		toWrite = (int)Math.min( a[ segment ].length - displacement, Math.min( length - written, MAX_IO_LENGTH ) );
+		dataOutput.write( a[ segment ], displacement, toWrite );
+		written += toWrite;
+		displacement += toWrite;
+		if ( displacement == a[ segment ].length ) {
+			segment++;
+			displacement = 0;
+		}
+	} while( written < length );
+}
+
+// Additional read/write methods to work around the DataInput/DataOutput schizophrenia.
+
+
+/** Loads bytes from a given input stream, storing them in a given big-array fragment.
+ *
+ * <p>Note that this method is going to be significantly faster than {@link #loadBytes(DataInput,byte[][],long,long)}
+ * as it uses {@link InputStream}'s bulk-read methods.
+ *
+ * @param inputStream an input stream.
+ * @param array a big array which will be filled with data from <code>inputStream</code>.
+ * @param offset the index of the first element of <code>array</code> to be filled.
+ * @param length the number of elements of <code>array</code> to be filled.
+ * @return the number of elements actually read from <code>inputStream</code> (it might be less than <code>length</code> if <code>inputStream</code> ends).
+ */
+public static long LOAD_KEYS( final InputStream inputStream, final KEY_TYPE[][] array, final long offset, final long length ) throws IOException {
+	return read( inputStream, array, offset, length );
+}
+
+/** Loads bytes from a given input stream, storing them in a given big array.
+ *
+ * <p>Note that this method is going to be significantly faster than {@link #loadBytes(DataInput,byte[][])}
+ * as it uses {@link InputStream}'s bulk-read methods.
+ *
+ * @param inputStream an input stream.
+ * @param array a big array which will be filled with data from <code>inputStream</code>.
+ * @return the number of elements actually read from <code>inputStream</code> (it might be less than the array length if <code>inputStream</code> ends).
+ */
+public static long LOAD_KEYS( final InputStream inputStream, final KEY_TYPE[][] array ) throws IOException {
+	return read( inputStream, array, 0, BIG_ARRAYS.length( array ) );
+}
+
+/** Stores a big-array fragment to a given output stream.
+ *
+ * <p>Note that this method is going to be significantly faster than {@link #storeBytes(byte[][],long,long,DataOutput)}
+ * as it uses {@link OutputStream}'s bulk-write methods.
+ *
+ * @param array a big array whose elements will be written to <code>outputStream</code>.
+ * @param offset the index of the first element of <code>array</code> to be written.
+ * @param length the number of elements of <code>array</code> to be written.
+ * @param outputStream an output stream.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[][], final long offset, final long length, final OutputStream outputStream ) throws IOException {
+	write( outputStream, array, offset, length );
+}
+
+/** Stores a big array to a given output stream.
+ *
+ * <p>Note that this method is going to be significantly faster than {@link #storeBytes(byte[][],DataOutput)}
+ * as it uses {@link OutputStream}'s bulk-read methods.
+ *
+ * @param array a big array whose elements will be written to <code>outputStream</code>.
+ * @param outputStream an output stream.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[][], final OutputStream outputStream ) throws IOException {
+	write( outputStream, array, 0, BIG_ARRAYS.length( array ) );
+}
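+
+/* A minimal usage sketch for the big-array variants above (again generated as
+ * loadBytes()/storeBytes(), but on byte[][]; the streams are illustrative only):
+ *
+ *   byte[][] big = ByteBigArrays.newBigArray( 1L << 20 );
+ *   long read = BinIO.loadBytes( System.in, big );     // fills the big array segment by segment
+ *   BinIO.storeBytes( big, 0, read, System.out );
+ */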
+
+#endif
+
+
+/** Loads elements from a given data input, storing them in a given array fragment.
+ *
+ * @param dataInput a data input.
+ * @param array an array which will be filled with data from <code>dataInput</code>.
+ * @param offset the index of the first element of <code>array</code> to be filled.
+ * @param length the number of elements of <code>array</code> to be filled.
+ * @return the number of elements actually read from <code>dataInput</code> (it might be less than <code>length</code> if <code>dataInput</code> ends).
+ */
+public static int LOAD_KEYS( final DataInput dataInput, final KEY_TYPE[] array, final int offset, final int length ) throws IOException {
+	PACKAGE.ARRAYS.ensureOffsetLength( array, offset, length );
+	int i = 0;
+	try {
+		for( i = 0; i < length; i++ ) array[ i + offset ] = dataInput.READ_KEY();
+	}
+	catch( EOFException itsOk ) {}
+	return i;
+}
+
+/** Loads elements from a given data input, storing them in a given array.
+ *
+ * @param dataInput a data input.
+ * @param array an array which will be filled with data from <code>dataInput</code>.
+ * @return the number of elements actually read from <code>dataInput</code> (it might be less than the array length if <code>dataInput</code> ends).
+ */
+public static int LOAD_KEYS( final DataInput dataInput, final KEY_TYPE[] array ) throws IOException {
+	int i = 0;
+	try {
+		final int length = array.length;
+		for( i = 0; i < length; i++ ) array[ i ] = dataInput.READ_KEY();
+	}
+	catch( EOFException itsOk ) {}
+	return i;
+}
+
+/** Loads elements from a file given by a {@link File} object, storing them in a given array fragment.
+ *
+ * @param file a file.
+ * @param array an array which will be filled with data from the specified file.
+ * @param offset the index of the first element of <code>array</code> to be filled.
+ * @param length the number of elements of <code>array</code> to be filled.
+ * @return the number of elements actually read from the given file (it might be less than <code>length</code> if the file is too short).
+ */
+public static int LOAD_KEYS( final File file, final KEY_TYPE[] array, final int offset, final int length ) throws IOException {
+	PACKAGE.ARRAYS.ensureOffsetLength( array, offset, length );
+
+	final FileInputStream fis = new FileInputStream( file );
+#if #keyclass(Byte)
+	final int result = read( fis, array, offset, length );
+	fis.close();
+	return result;
+#else
+	final DataInputStream dis = new DataInputStream( new FastBufferedInputStream( fis ) );
+
+	int i = 0;
+	try {
+		for( i = 0; i < length; i++ ) array[ i + offset ] = dis.READ_KEY();
+	}
+	catch( EOFException itsOk ) {}
+
+	dis.close();
+	return i;
+#endif
+}
+
+/** Loads elements from a file given by a pathname, storing them in a given array fragment.
+ *
+ * @param filename a filename.
+ * @param array an array which will be filled with data from the specified file.
+ * @param offset the index of the first element of <code>array</code> to be filled.
+ * @param length the number of elements of <code>array</code> to be filled.
+ * @return the number of elements actually read from the given file (it might be less than <code>length</code> if the file is too short).
+ */
+public static int LOAD_KEYS( final CharSequence filename, final KEY_TYPE[] array, final int offset, final int length ) throws IOException {
+	return LOAD_KEYS( new File( filename.toString() ), array, offset, length );
+}
+
+/** Loads elements from a file given by a {@link File} object, storing them in a given array.
+ *
+ * @param file a file.
+ * @param array an array which will be filled with data from the specified file.
+ * @return the number of elements actually read from the given file (it might be less than the array length if the file is too short).
+ */
+public static int LOAD_KEYS( final File file, final KEY_TYPE[] array ) throws IOException {
+	final FileInputStream fis = new FileInputStream( file );
+#if #keyclass(Byte)
+	final int result = read( fis, array, 0, array.length );
+	fis.close();
+	return result;
+#else
+	final DataInputStream dis = new DataInputStream( new FastBufferedInputStream( fis ) );
+
+	int i = 0;
+	try {
+		final int length = array.length;
+		for( i = 0; i < length; i++ ) array[ i ] = dis.READ_KEY();
+	}
+	catch( EOFException itsOk ) {}
+
+	dis.close();
+
+	return i;
+#endif
+}
+
+/** Loads elements from a file given by a pathname, storing them in a given array.
+ *
+ * @param filename a filename.
+ * @param array an array which will be filled with data from the specified file.
+ * @return the number of elements actually read from the given file (it might be less than the array length if the file is too short).
+ */
+public static int LOAD_KEYS( final CharSequence filename, final KEY_TYPE[] array ) throws IOException {
+	return LOAD_KEYS( new File( filename.toString() ), array );
+}
+
+/** Loads elements from a file given by a {@link File} object, storing them in a new array.
+ *
+ * <P>Note that the length of the returned array will be computed 
+ * by dividing the specified file size by the number of bytes used to
+ * represent each element.
+ *
+ * @param file a file.
+ * @return an array filled with the content of the specified file.
+ */
+public static KEY_TYPE[] LOAD_KEYS( final File file ) throws IOException {
+	final FileInputStream fis = new FileInputStream( file );
+
+#if #keyclass(Boolean)
+	final long length = fis.getChannel().size();
+#else
+	final long length = fis.getChannel().size() / ( KEY_CLASS.SIZE / 8 );
+#endif
+
+	if ( length > Integer.MAX_VALUE ) {
+		fis.close();
+		throw new IllegalArgumentException( "File too long: " + fis.getChannel().size()+ " bytes (" + length + " elements)" ); 
+	}
+
+	final KEY_TYPE[] array = new KEY_TYPE[ (int)length ];
+
+#if #keyclass(Byte)
+	if ( read( fis, array, 0, (int)length ) < length ) throw new EOFException();
+	fis.close();
+#else
+	final DataInputStream dis = new DataInputStream( new FastBufferedInputStream( fis ) );
+	for( int i = 0; i < length; i++ ) array[ i ] = dis.READ_KEY();
+	dis.close();
+#endif
+	return array;
+}
+
+/** Loads elements from a file given by a filename, storing them in a new array.
+ *
+ * <P>Note that the length of the returned array will be computed 
+ * by dividing the specified file size by the number of bytes used to
+ * represent each element.
+ *
+ * @param filename a filename.
+ * @return an array filled with the content of the specified file.
+ */
+public static KEY_TYPE[] LOAD_KEYS( final CharSequence filename ) throws IOException {
+	return LOAD_KEYS( new File( filename.toString() ) );
+}
+
+/** Stores an array fragment to a given data output.
+ *
+ * @param array an array whose elements will be written to <code>dataOutput</code>.
+ * @param offset the index of the first element of <code>array</code> to be written.
+ * @param length the number of elements of <code>array</code> to be written.
+ * @param dataOutput a data output.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[], final int offset, final int length, final DataOutput dataOutput ) throws IOException {
+	PACKAGE.ARRAYS.ensureOffsetLength( array, offset, length );
+#if #keyclass(Byte)
+	write( dataOutput, array, offset, length );
+#else
+	for( int i = 0; i < length; i++ ) dataOutput.WRITE_KEY( array[ offset + i ] );
+#endif
+}
+
+/** Stores an array to a given data output.
+ *
+ * @param array an array whose elements will be written to <code>dataOutput</code>.
+ * @param dataOutput a data output.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[], final DataOutput dataOutput ) throws IOException {
+#if #keyclass(Byte)
+	write( dataOutput, array, 0, array.length );
+#else
+	final int length = array.length;
+	for( int i = 0; i < length; i++ ) dataOutput.WRITE_KEY( array[ i ] );
+#endif
+}
+
+/** Stores an array fragment to a file given by a {@link File} object.
+ *
+ * @param array an array whose elements will be written to <code>file</code>.
+ * @param offset the index of the first element of <code>array</code> to be written.
+ * @param length the number of elements of <code>array</code> to be written.
+ * @param file a file.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[], final int offset, final int length, final File file ) throws IOException {
+	PACKAGE.ARRAYS.ensureOffsetLength( array, offset, length );
+#if #keyclass(Byte)
+	final OutputStream os = new FastBufferedOutputStream( new FileOutputStream( file ) );
+	write( os, array, offset, length );
+	os.close();
+#else
+	final DataOutputStream dos = new DataOutputStream( new FastBufferedOutputStream( new FileOutputStream( file ) ) );
+	for( int i = 0; i < length; i++ ) dos.WRITE_KEY( array[ offset + i ] );
+	dos.close();
+#endif
+}
+
+/** Stores an array fragment to a file given by a pathname.
+ *
+ * @param array an array whose elements will be written to <code>filename</code>.
+ * @param offset the index of the first element of <code>array</code> to be written.
+ * @param length the number of elements of <code>array</code> to be written.
+ * @param filename a filename.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[], final int offset, final int length, final CharSequence filename ) throws IOException {
+	STORE_KEYS( array, offset, length, new File( filename.toString() ) );
+}
+
+/** Stores an array to a file given by a {@link File} object.
+ *
+ * @param array an array whose elements will be written to <code>file</code>.
+ * @param file a file.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[], final File file ) throws IOException {
+#if #keyclass(Byte)
+	final OutputStream os = new FastBufferedOutputStream( new FileOutputStream( file ) );
+	write( os, array, 0, array.length );
+	os.close();
+#else
+	final int length = array.length;
+	final DataOutputStream dos = new DataOutputStream( new FastBufferedOutputStream( new FileOutputStream( file ) ) );
+	for( int i = 0; i < length; i++ ) dos.WRITE_KEY( array[ i ] );
+	dos.close();
+#endif
+}
+
+/** Stores an array to a file given by a pathname.
+ *
+ * @param array an array whose elements will be written to <code>filename</code>.
+ * @param filename a filename.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[], final CharSequence filename ) throws IOException {
+	STORE_KEYS( array, new File( filename.toString() ) );
+}
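
For the int specialization, the LOAD_KEYS/STORE_KEYS templates above generate the loadInts/storeInts family of it.unimi.dsi.fastutil.io.BinIO. A minimal round-trip sketch, assuming that expansion (the file names are illustrative):

    import it.unimi.dsi.fastutil.io.BinIO;
    import java.io.IOException;

    public class BinIOArrayRoundTrip {
        public static void main( String[] args ) throws IOException {
            final int[] data = { 3, 1, 4, 1, 5, 9, 2, 6 };
            // Store the whole array, then just a fragment (elements 4, 1, 5, 9).
            BinIO.storeInts( data, "data.bin" );
            BinIO.storeInts( data, 2, 4, "fragment.bin" );
            // Load everything back; the array length is the file size divided by 4 bytes per int.
            final int[] copy = BinIO.loadInts( "data.bin" );
            System.out.println( copy.length ); // 8
        }
    }
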
+
+
+
+
+
+/** Loads elements from a given data input, storing them in a given big-array fragment.
+ *
+ * @param dataInput a data input.
+ * @param array a big array which will be filled with data from <code>dataInput</code>.
+ * @param offset the index of the first element of <code>array</code> to be filled.
+ * @param length the number of elements of <code>array</code> to be filled.
+ * @return the number of elements actually read from <code>dataInput</code> (it might be less than <code>length</code> if <code>dataInput</code> ends).
+ */
+public static long LOAD_KEYS( final DataInput dataInput, final KEY_TYPE[][] array, final long offset, final long length ) throws IOException {
+	PACKAGE.BIG_ARRAYS.ensureOffsetLength( array, offset, length );
+	long c = 0;
+	try {
+		for( int i = segment( offset ); i < segment( offset + length + SEGMENT_MASK ); i++ ) {
+			final KEY_TYPE[] t = array[ i ];
+			final int l = (int)Math.min( t.length, offset + length - start( i ) );
+			for( int d = (int)Math.max( 0, offset - start( i ) ); d < l; d++ ) {
+				t[ d ] = dataInput.READ_KEY();
+				c++;
+			}
+		}
+	}
+	catch( EOFException itsOk ) {}
+	return c;
+}
+
+/** Loads elements from a given data input, storing them in a given big array.
+ *
+ * @param dataInput a data input.
+ * @param array a big array which will be filled with data from <code>dataInput</code>.
+ * @return the number of elements actually read from <code>dataInput</code> (it might be less than the array length if <code>dataInput</code> ends).
+ */
+public static long LOAD_KEYS( final DataInput dataInput, final KEY_TYPE[][] array ) throws IOException {
+	long c = 0;
+	try {
+		for( int i = 0; i < array.length; i++ ) {
+			final KEY_TYPE[] t = array[ i ];
+			final int l = t.length;
+			for( int d = 0; d < l; d++ ) {
+				t[ d ] = dataInput.READ_KEY();
+				c++;
+			}
+		}
+	}
+	catch( EOFException itsOk ) {}
+	return c;
+}
+
+/** Loads elements from a file given by a {@link File} object, storing them in a given big-array fragment.
+ *
+ * @param file a file.
+ * @param array a big array which will be filled with data from the specified file.
+ * @param offset the index of the first element of <code>array</code> to be filled.
+ * @param length the number of elements of <code>array</code> to be filled.
+ * @return the number of elements actually read from the given file (it might be less than <code>length</code> if the file is too short).
+ */
+public static long LOAD_KEYS( final File file, final KEY_TYPE[][] array, final long offset, final long length ) throws IOException {
+	PACKAGE.BIG_ARRAYS.ensureOffsetLength( array, offset, length );
+
+	final FileInputStream fis = new FileInputStream( file );
+#if #keyclass(Byte)
+	final long result = read( fis, array, offset, length );
+	fis.close();
+	return result;
+#else
+	final DataInputStream dis = new DataInputStream( new FastBufferedInputStream( fis ) );
+
+	long c = 0;
+	try {
+		for( int i = segment( offset ); i < segment( offset + length + SEGMENT_MASK ); i++ ) {
+			final KEY_TYPE[] t = array[ i ];
+			final int l = (int)Math.min( t.length, offset + length - start( i ) );
+			for( int d = (int)Math.max( 0, offset - start( i ) ); d < l; d++ ) {
+				t[ d ] = dis.READ_KEY();
+				c++;
+			}
+		}
+	}
+	catch( EOFException itsOk ) {}
+	dis.close();
+	return c;
+#endif
+}
+
+/** Loads elements from a file given by a pathname, storing them in a given big-array fragment.
+ *
+ * @param filename a filename.
+ * @param array a big array which will be filled with data from the specified file.
+ * @param offset the index of the first element of <code>array</code> to be filled.
+ * @param length the number of elements of <code>array</code> to be filled.
+ * @return the number of elements actually read from the given file (it might be less than <code>length</code> if the file is too short).
+ */
+public static long LOAD_KEYS( final CharSequence filename, final KEY_TYPE[][] array, final long offset, final long length ) throws IOException {
+	return LOAD_KEYS( new File( filename.toString() ), array, offset, length );
+}
+
+/** Loads elements from a file given by a {@link File} object, storing them in a given big array.
+ *
+ * @param file a file.
+ * @param array a big array which will be filled with data from the specified file.
+ * @return the number of elements actually read from the given file (it might be less than the array length if the file is too short).
+ */
+public static long LOAD_KEYS( final File file, final KEY_TYPE[][] array ) throws IOException {
+	final FileInputStream fis = new FileInputStream( file );
+#if #keyclass(Byte)
+	final long result = read( fis, array, 0, BIG_ARRAYS.length( array ) );
+	fis.close();
+	return result;
+#else
+	final DataInputStream dis = new DataInputStream( new FastBufferedInputStream( fis ) );
+
+	long c = 0;
+	try {
+		for( int i = 0; i < array.length; i++ ) {
+			final KEY_TYPE[] t = array[ i ];
+			final int l = t.length;
+			for( int d = 0; d < l; d++ ) {
+				t[ d ] = dis.READ_KEY();
+				c++;
+			}
+		}
+	}
+	catch( EOFException itsOk ) {}
+
+	dis.close();
+
+	return c;
+#endif
+}
+
+/** Loads elements from a file given by a pathname, storing them in a given big array.
+ *
+ * @param filename a filename.
+ * @param array a big array which will be filled with data from the specified file.
+ * @return the number of elements actually read from the given file (it might be less than the array length if the file is too short).
+ */
+public static long LOAD_KEYS( final CharSequence filename, final KEY_TYPE[][] array ) throws IOException {
+	return LOAD_KEYS( new File( filename.toString() ), array );
+}
+
+/** Loads elements from a file given by a {@link File} object, storing them in a new big array.
+ *
+ * <P>Note that the length of the returned big array will be computed by
+ * dividing the specified file size by the number of bytes used to
+ * represent each element.
+ *
+ * @param file a file.
+ * @return a big array filled with the content of the specified file.
+ */
+public static KEY_TYPE[][] LOAD_KEYS_BIG( final File file ) throws IOException {
+	final FileInputStream fis = new FileInputStream( file );
+
+#if #keyclass(Boolean)
+	final long length = fis.getChannel().size();
+#else
+	final long length = fis.getChannel().size() / ( KEY_CLASS.SIZE / 8 );
+#endif
+
+	final KEY_TYPE[][] array = BIG_ARRAYS.newBigArray( length );
+
+#if #keyclass(Byte)
+	if ( read( fis, array, 0, length ) < length ) throw new EOFException();
+	fis.close();
+#else
+	final DataInputStream dis = new DataInputStream( new FastBufferedInputStream( fis ) );
+
+	for( int i = 0; i < array.length; i++ ) {
+		final KEY_TYPE[] t = array[ i ];
+		final int l = t.length;
+		for( int d = 0; d < l; d++ ) t[ d ] = dis.READ_KEY();
+	}
+
+	dis.close();
+#endif
+	return array;
+}
+
+/** Loads elements from a file given by a filename, storing them in a new big array.
+ *
+ * <P>Note that the length of the returned big array will be computed by
+ * dividing the specified file size by the number of bytes used to
+ * represent each element.
+ *
+ * @param filename a filename.
+ * @return a big array filled with the content of the specified file.
+ */
+public static KEY_TYPE[][] LOAD_KEYS_BIG( final CharSequence filename ) throws IOException {
+	return LOAD_KEYS_BIG( new File( filename.toString() ) );
+}
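
The LOAD_KEYS_BIG methods above and the big-array STORE_KEYS overloads below do the same over an int[][] of segments addressed by long indices; for int keys they should expand to loadIntsBig and the storeInts(int[][], ...) overloads. A small sketch under that assumption:

    import it.unimi.dsi.fastutil.io.BinIO;
    import it.unimi.dsi.fastutil.ints.IntBigArrays;
    import java.io.IOException;

    public class BinIOBigArraySketch {
        public static void main( String[] args ) throws IOException {
            // A big array is an int[][] whose segments are managed by IntBigArrays.
            final int[][] big = IntBigArrays.newBigArray( 1000 );
            IntBigArrays.set( big, 999, 42 );
            BinIO.storeInts( big, "big.bin" );                   // store the whole big array
            final int[][] copy = BinIO.loadIntsBig( "big.bin" );
            System.out.println( IntBigArrays.get( copy, 999 ) ); // 42
        }
    }
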
+
+/** Stores a big-array fragment to a given data output.
+ *
+ * @param array a big array whose elements will be written to <code>dataOutput</code>.
+ * @param offset the index of the first element of <code>array</code> to be written.
+ * @param length the number of elements of <code>array</code> to be written.
+ * @param dataOutput a data output.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[][], final long offset, final long length, final DataOutput dataOutput ) throws IOException {
+	PACKAGE.BIG_ARRAYS.ensureOffsetLength( array, offset, length );
+#if #keyclass(Byte)
+	write( dataOutput, array, offset, length );
+#else
+	for( int i = segment( offset ); i < segment( offset + length + SEGMENT_MASK ); i++ ) {
+		final KEY_TYPE[] t = array[ i ];
+		final int l = (int)Math.min( t.length, offset + length - start( i ) );
+		for( int d = (int)Math.max( 0, offset - start( i ) ); d < l; d++ ) dataOutput.WRITE_KEY( t[ d ] );
+	}
+#endif
+}
+
+/** Stores a big array to a given data output.
+ *
+ * @param array a big array whose elements will be written to <code>dataOutput</code>.
+ * @param dataOutput a data output.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[][], final DataOutput dataOutput ) throws IOException {
+#if #keyclass(Byte)
+	write( dataOutput, array, 0, BIG_ARRAYS.length( array ) );
+#else
+	for( int i = 0; i < array.length; i++ ) {
+		final KEY_TYPE[] t = array[ i ];
+		final int l = t.length;
+		for( int d = 0; d < l; d++ ) dataOutput.WRITE_KEY( t[ d ] );
+	}
+#endif
+}
+
+/** Stores a big-array fragment to a file given by a {@link File} object.
+ *
+ * @param array a big array whose elements will be written to <code>file</code>.
+ * @param offset the index of the first element of <code>array</code> to be written.
+ * @param length the number of elements of <code>array</code> to be written.
+ * @param file a file.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[][], final long offset, final long length, final File file ) throws IOException {
+	PACKAGE.BIG_ARRAYS.ensureOffsetLength( array, offset, length );
+#if #keyclass(Byte)
+	final OutputStream os = new FastBufferedOutputStream( new FileOutputStream( file ) );
+	write( os, array, offset, length );
+	os.close();
+#else
+	final DataOutputStream dos = new DataOutputStream( new FastBufferedOutputStream( new FileOutputStream( file ) ) );
+	for( int i = segment( offset ); i < segment( offset + length + SEGMENT_MASK ); i++ ) {
+		final KEY_TYPE[] t = array[ i ];
+		final int l = (int)Math.min( t.length, offset + length - start( i ) );
+		for( int d = (int)Math.max( 0, offset - start( i ) ); d < l; d++ ) dos.WRITE_KEY( t[ d ] );
+	}
+	dos.close();
+	
+#endif
+}
+
+/** Stores a big-array fragment to a file given by a pathname.
+ *
+ * @param array a big array whose elements will be written to <code>filename</code>.
+ * @param offset the index of the first element of <code>array</code> to be written.
+ * @param length the number of elements of <code>array</code> to be written.
+ * @param filename a filename.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[][], final long offset, final long length, final CharSequence filename ) throws IOException {
+	STORE_KEYS( array, offset, length, new File( filename.toString() ) );
+}
+
+/** Stores a big array to a file given by a {@link File} object.
+ *
+ * @param array a big array whose elements will be written to <code>file</code>.
+ * @param file a file.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[][], final File file ) throws IOException {
+#if #keyclass(Byte)
+	final OutputStream os = new FastBufferedOutputStream( new FileOutputStream( file ) );
+	write( os, array, 0, BIG_ARRAYS.length( array ) );
+	os.close();
+#else
+	final DataOutputStream dos = new DataOutputStream( new FastBufferedOutputStream( new FileOutputStream( file ) ) );
+	for( int i = 0; i < array.length; i++ ) {
+		final KEY_TYPE[] t = array[ i ];
+		final int l = t.length;
+		for( int d = 0; d < l; d++ ) dos.WRITE_KEY( t[ d ] );
+	}
+	dos.close();
+#endif
+}
+
+/** Stores a big array to a file given by a pathname.
+ *
+ * @param array a big array whose elements will be written to <code>filename</code>.
+ * @param filename a filename.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[][], final CharSequence filename ) throws IOException {
+	STORE_KEYS( array, new File( filename.toString() ) );
+}
+
+
+
+
+/** Stores the elements returned by an iterator to a given data output.
+ *
+ * @param i an iterator whose output will be written to <code>dataOutput</code>.
+ * @param dataOutput a data output.
+ */
+public static void STORE_KEYS( final KEY_ITERATOR i, final DataOutput dataOutput ) throws IOException {
+	while( i.hasNext() ) dataOutput.WRITE_KEY( i.NEXT_KEY() );
+}
+
+/** Stores the elements returned by an iterator to a file given by a {@link File} object.
+ *
+ * @param i an iterator whose output will be written to <code>file</code>.
+ * @param file a file.
+ */
+public static void STORE_KEYS( final KEY_ITERATOR i, final File file ) throws IOException {
+	final DataOutputStream dos = new DataOutputStream( new FastBufferedOutputStream( new FileOutputStream( file ) ) );
+	while( i.hasNext() ) dos.WRITE_KEY( i.NEXT_KEY() );
+	dos.close();
+}
+
+/** Stores the elements returned by an iterator to a file given by a pathname.
+ *
+ * @param i an iterator whose output will be written to <code>filename</code>.
+ * @param filename a filename.
+ */
+public static void STORE_KEYS( final KEY_ITERATOR i, final CharSequence filename ) throws IOException {
+	STORE_KEYS( i, new File( filename.toString() ) );
+}
+
+/** A wrapper that exhibits the content of a data input stream as a type-specific iterator. */
+
+final private static class KEY_DATA_INPUT_WRAPPER extends KEY_ABSTRACT_ITERATOR {
+	final private DataInput dataInput;
+	private boolean toAdvance = true;
+	private boolean endOfProcess = false;
+	private KEY_TYPE next;
+
+	public KEY_DATA_INPUT_WRAPPER( final DataInput dataInput ) {
+		this.dataInput = dataInput;
+	}
+
+	public boolean hasNext() {
+		if ( ! toAdvance ) return ! endOfProcess;
+
+		toAdvance = false;
+
+		try {
+			next = dataInput.READ_KEY();
+		}
+		catch( EOFException eof ) {
+			endOfProcess = true;
+		}
+		catch( IOException rethrow ) { throw new RuntimeException( rethrow ); }
+
+		return ! endOfProcess;
+	}
+
+	public KEY_TYPE NEXT_KEY() {
+		if (! hasNext()) throw new NoSuchElementException();
+		toAdvance = true;
+		return next;
+	}
+}
+
+
+
+/** Wraps the given data input stream into an iterator.
+ *
+ * @param dataInput a data input.
+ */
+public static KEY_ITERATOR AS_KEY_ITERATOR( final DataInput dataInput ) {
+	return new KEY_DATA_INPUT_WRAPPER( dataInput );
+}
+
+/** Wraps a file given by a {@link File} object into an iterator.
+ *
+ * @param file a file.
+ */
+public static KEY_ITERATOR AS_KEY_ITERATOR( final File file ) throws IOException {
+	return new KEY_DATA_INPUT_WRAPPER( new DataInputStream( new FastBufferedInputStream( new FileInputStream( file ) ) ) );
+}
+
+/** Wraps a file given by a pathname into an iterator.
+ *
+ * @param filename a filename.
+ */
+public static KEY_ITERATOR AS_KEY_ITERATOR( final CharSequence filename ) throws IOException {
+	return AS_KEY_ITERATOR( new File( filename.toString() ) );
+}
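
The iterator-based entry points allow streaming over files that do not fit in memory: AS_KEY_ITERATOR wraps a file (or a DataInput) as a lazy type-specific iterator, and the iterator form of STORE_KEYS drains one back out. A streaming sketch for the int case, assuming the usual asIntIterator expansion and an illustrative input file:

    import it.unimi.dsi.fastutil.io.BinIO;
    import it.unimi.dsi.fastutil.ints.IntIterator;
    import java.io.IOException;

    public class BinIOStreamingSum {
        public static void main( String[] args ) throws IOException {
            // Lazily iterate over the ints stored in data.bin, without loading an array.
            final IntIterator i = BinIO.asIntIterator( "data.bin" );
            long sum = 0;
            while( i.hasNext() ) sum += i.nextInt(); // nextInt() avoids boxing
            System.out.println( sum );
        }
    }
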
+
diff --git a/drv/Collection.drv b/drv/Collection.drv
new file mode 100644
index 0000000..48966ed
--- /dev/null
+++ b/drv/Collection.drv
@@ -0,0 +1,140 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.Collection;
+
+/** A type-specific {@link Collection}; provides some additional methods
+ * that use polymorphism to avoid (un)boxing.
+ *
+ * <P>Additionally, this interface strengthens (again) {@link #iterator()} and defines
+ * slightly different semantics for {@link #toArray(Object[])}.
+ *
+ * @see Collection
+ */
+
+public interface COLLECTION KEY_GENERIC extends Collection<KEY_GENERIC_CLASS>, KEY_ITERABLE KEY_GENERIC {
+
+	/** Returns a type-specific iterator on the elements of this collection.
+	 *
+	 * <p>Note that this specification strengthens the one given in 
+	 * {@link java.lang.Iterable#iterator()}, which was already 
+	 * strengthened in the corresponding type-specific class,
+	 * but was weakened by the fact that this interface extends {@link Collection}.
+	 *
+	 * @return a type-specific iterator on the elements of this collection.
+	 */
+	KEY_ITERATOR KEY_GENERIC iterator();
+
+	/** Returns a type-specific iterator on the elements of this collection.
+	 *
+	 * @see #iterator()
+	 * @deprecated As of <code>fastutil</code> 5, replaced by {@link #iterator()}.
+	 */
+	@Deprecated
+	KEY_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD();
+
+	/** Returns an array containing the items of this collection;
+	 * the runtime type of the returned array is that of the specified array. 
+	 *
+	 * <p><strong>Warning</strong>: Note that, contrary to {@link Collection#toArray(Object[])}, this
+	 * method just writes all elements of this collection: no special 
+	 * value will be added after the last one.
+	 *
+	 * @param a if this array is big enough, it will be used to store this collection.
+	 * @return a primitive type array containing the items of this collection.
+	 * @see Collection#toArray(Object[])
+	 */
+	<T> T[] toArray(T[] a);
+
+
+#if #keys(primitive)
+
+	/**
+	 * @see Collection#contains(Object)
+	 */
+	boolean contains( KEY_TYPE key );
+
+	/** Returns a primitive type array containing the items of this collection. 
+	 * @return a primitive type array containing the items of this collection.
+	 * @see Collection#toArray()
+	 */
+	KEY_TYPE[] TO_KEY_ARRAY();
+
+	/** Returns a primitive type array containing the items of this collection.
+	 *
+	 * <p>Note that, contrary to {@link Collection#toArray(Object[])}, this
+	 * method just writes all elements of this collection: no special 
+	 * value will be added after the last one.
+	 *
+	 * @param a if this array is big enough, it will be used to store this collection.
+	 * @return a primitive type array containing the items of this collection.
+	 * @see Collection#toArray(Object[])
+	 */
+	KEY_TYPE[] TO_KEY_ARRAY( KEY_TYPE a[] );
+
+	/** Returns a primitive type array containing the items of this collection. 
+	 *
+	 * <p>Note that, contrary to {@link Collection#toArray(Object[])}, this
+	 * method just writes all elements of this collection: no special 
+	 * value will be added after the last one.
+	 *
+	 * @param a if this array is big enough, it will be used to store this collection.
+	 * @return a primitive type array containing the items of this collection.
+	 * @see Collection#toArray(Object[])
+	 */
+	KEY_TYPE[] toArray( KEY_TYPE a[] );
+
+
+	/**
+	 * @see Collection#add(Object)
+	 */
+	boolean add( KEY_TYPE key );
+
+	/** Note that this method should be called {@link java.util.Collection#remove(Object) remove()}, but the clash
+	 * with the similarly named index-based method in the {@link java.util.List} interface
+	 * forces us to use a distinguished name. For simplicity, the set interfaces reinstate
+	 * <code>remove()</code>.
+	 *
+	 * @see Collection#remove(Object)
+	 */
+	boolean rem( KEY_TYPE key );
+
+	/**
+	 * @see Collection#addAll(Collection)
+	 */
+	boolean addAll( COLLECTION c );
+
+	/**
+	 * @see Collection#containsAll(Collection)
+	 */
+	boolean containsAll( COLLECTION c );
+
+	/**
+	 * @see Collection#removeAll(Collection)
+	 */
+	boolean removeAll( COLLECTION c );
+
+	/**
+	 * @see Collection#retainAll(Collection)
+	 */
+	boolean retainAll( COLLECTION c );
+
+#endif
+
+}
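
In the generated classes these additional methods become unboxed operations; for instance IntCollection (implemented by IntArrayList and the other int containers) exposes contains(int), rem(int) and toIntArray(). A short sketch of how that differs from the plain java.util view:

    import it.unimi.dsi.fastutil.ints.IntArrayList;
    import it.unimi.dsi.fastutil.ints.IntCollection;

    public class TypeSpecificCollectionSketch {
        public static void main( String[] args ) {
            final IntCollection c = new IntArrayList( new int[] { 1, 2, 3, 2 } );
            c.add( 4 );                            // primitive add, no Integer allocation
            c.rem( 2 );                            // removes one occurrence of the value 2
            System.out.println( c.contains( 3 ) ); // true, primitive lookup
            final int[] a = c.toIntArray();        // no special value is appended
            System.out.println( a.length );        // 4
        }
    }
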
diff --git a/drv/Collections.drv b/drv/Collections.drv
new file mode 100644
index 0000000..81eca9c
--- /dev/null
+++ b/drv/Collections.drv
@@ -0,0 +1,265 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.Collection;
+
+import it.unimi.dsi.fastutil.objects.ObjectArrays;
+
+
+/** A class providing static methods and objects that do useful things with type-specific collections.
+ *
+ * @see java.util.Collections
+ */
+
+public class COLLECTIONS {
+
+	private COLLECTIONS() {}
+
+	/** An immutable class representing an empty type-specific collection.
+	 *
+	 * <P>This class may be useful to implement your own in case you subclass
+	 * a type-specific collection.
+	 */
+
+	public abstract static class EmptyCollection KEY_GENERIC extends ABSTRACT_COLLECTION KEY_GENERIC {
+		
+		protected EmptyCollection() {}
+
+		public boolean add( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+
+		public boolean contains( KEY_TYPE k ) { return false; }
+           
+		public Object[] toArray() { return ObjectArrays.EMPTY_ARRAY; }
+
+#if #keys(primitive)
+		public KEY_TYPE[] TO_KEY_ARRAY( KEY_TYPE[] a ) { return a; }
+		public KEY_TYPE[] TO_KEY_ARRAY() { return ARRAYS.EMPTY_ARRAY; }
+
+		public boolean rem( KEY_TYPE k ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( COLLECTION c ) { throw new UnsupportedOperationException(); }
+		public boolean removeAll( COLLECTION c ) { throw new UnsupportedOperationException(); }
+		public boolean retainAll( COLLECTION c ) { throw new UnsupportedOperationException(); }
+		public boolean containsAll( COLLECTION c ) { return c.isEmpty(); }
+#else
+		public boolean remove( final Object k ) { throw new UnsupportedOperationException(); }
+		public <T> T[] toArray( T[] a ) { return a; }
+#endif
+
+		@SuppressWarnings("unchecked")
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return ITERATORS.EMPTY_ITERATOR; }
+
+		public int size() { return 0; }
+		public void clear() {}
+
+		public int hashCode() { return 0; }
+		public boolean equals( Object o ) { 
+			if ( o == this ) return true;
+			if ( ! ( o instanceof Collection ) ) return false;
+			return ((Collection<?>)o).isEmpty();
+		}
+	}
+
+
+	/** A synchronized wrapper class for collections. */
+
+	public static class SynchronizedCollection KEY_GENERIC implements COLLECTION KEY_GENERIC, java.io.Serializable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected final COLLECTION KEY_GENERIC collection;
+		protected final Object sync;
+
+		protected SynchronizedCollection( final COLLECTION KEY_GENERIC c, final Object sync ) {
+			if ( c == null ) throw new NullPointerException();
+			this.collection = c;
+			this.sync = sync;
+		}
+
+		protected SynchronizedCollection( final COLLECTION KEY_GENERIC c ) {
+			if ( c == null ) throw new NullPointerException();
+			this.collection = c;
+			this.sync = this;
+		}
+
+		public int size() { synchronized( sync ) { return collection.size(); } }
+		public boolean isEmpty() { synchronized( sync ) { return collection.isEmpty(); } }
+		public boolean contains( final KEY_TYPE o ) { synchronized( sync ) { return collection.contains( o ); } }
+
+		public KEY_TYPE[] TO_KEY_ARRAY() { synchronized( sync ) { return collection.TO_KEY_ARRAY(); } }
+
+#if #keys(primitive)
+		public Object[] toArray() { synchronized( sync ) { return collection.toArray(); } }
+		public KEY_TYPE[] TO_KEY_ARRAY( final KEY_TYPE[] a ) { synchronized( sync ) { return collection.TO_KEY_ARRAY( a ); } }
+		public KEY_TYPE[] toArray( final KEY_TYPE[] a ) { synchronized( sync ) { return collection.TO_KEY_ARRAY( a ); } }
+
+		public boolean addAll( final COLLECTION c ) { synchronized( sync ) { return collection.addAll( c ); } }
+		public boolean containsAll( final COLLECTION c ) { synchronized( sync ) { return collection.containsAll( c ); } }
+		public boolean removeAll( final COLLECTION c ) { synchronized( sync ) { return collection.removeAll( c ); } }
+		public boolean retainAll( final COLLECTION c ) { synchronized( sync ) { return collection.retainAll( c ); } }
+
+		public boolean add( final KEY_GENERIC_CLASS k ) { synchronized( sync ) { return collection.add( k ); } }
+		public boolean contains( final Object k ) { synchronized( sync ) { return collection.contains( k ); } }
+#endif
+
+		public <T> T[] toArray( final T[] a ) { synchronized( sync ) { return collection.toArray( a ); } }
+
+		public KEY_ITERATOR KEY_GENERIC iterator() { return collection.iterator(); }
+
+		@Deprecated
+		public KEY_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() { return iterator(); }
+
+		public boolean add( final KEY_GENERIC_TYPE k ) { synchronized( sync ) { return collection.add( k ); } }
+		public boolean rem( final KEY_TYPE k ) { synchronized( sync ) { return collection.REMOVE( k ); } }
+		public boolean remove( final Object ok ) { synchronized( sync ) { return collection.remove( ok ); } }
+
+		public boolean addAll( final Collection<? extends KEY_GENERIC_CLASS> c ) { synchronized( sync ) { return collection.addAll( c ); } }
+		public boolean containsAll( final Collection<?> c ) { synchronized( sync ) { return collection.containsAll( c ); } }
+		public boolean removeAll( final Collection<?> c ) { synchronized( sync ) { return collection.removeAll( c ); } }
+		public boolean retainAll( final Collection<?> c ) { synchronized( sync ) { return collection.retainAll( c ); } }
+
+		public void clear() { synchronized( sync ) { collection.clear(); } }
+		public String toString() { synchronized( sync ) { return collection.toString(); } }
+	}
+
+
+	/** Returns a synchronized collection backed by the specified collection.
+	 *
+	 * @param c the collection to be wrapped in a synchronized collection.
+	 * @return a synchronized view of the specified collection.
+	 * @see java.util.Collections#synchronizedCollection(Collection)
+	 */
+	public static KEY_GENERIC COLLECTION KEY_GENERIC synchronize( final COLLECTION KEY_GENERIC c ) { return new SynchronizedCollection KEY_GENERIC( c ); }
+
+	/** Returns a synchronized collection backed by the specified collection, using an assigned object to synchronize.
+	 *
+	 * @param c the collection to be wrapped in a synchronized collection.
+	 * @param sync an object that will be used to synchronize the list access.
+	 * @return a synchronized view of the specified collection.
+	 * @see java.util.Collections#synchronizedCollection(Collection)
+	 */
+
+	public static KEY_GENERIC COLLECTION KEY_GENERIC synchronize( final COLLECTION KEY_GENERIC c, final Object sync ) { return new SynchronizedCollection KEY_GENERIC( c, sync ); }
+
+
+	/** An unmodifiable wrapper class for collections. */
+
+	public static class UnmodifiableCollection KEY_GENERIC implements COLLECTION KEY_GENERIC, java.io.Serializable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected final COLLECTION KEY_GENERIC collection;
+
+		protected UnmodifiableCollection( final COLLECTION KEY_GENERIC c ) {
+			if ( c == null ) throw new NullPointerException();
+			this.collection = c;
+		}
+
+		public int size() { return collection.size(); }
+		public boolean isEmpty() { return collection.isEmpty(); }
+		public boolean contains( final KEY_TYPE o ) { return collection.contains( o ); }
+
+		public KEY_ITERATOR KEY_GENERIC iterator() { return ITERATORS.unmodifiable( collection.iterator() ); }
+
+		@Deprecated
+		public KEY_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() { return iterator(); }
+
+		public boolean add( final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+		public boolean remove( final Object ok ) { throw new UnsupportedOperationException(); }
+
+		public boolean addAll( final Collection<? extends KEY_GENERIC_CLASS> c ) { throw new UnsupportedOperationException(); }
+		public boolean containsAll( final Collection<?> c ) { return collection.containsAll( c ); }
+		public boolean removeAll( final Collection<?> c ) { throw new UnsupportedOperationException(); }
+		public boolean retainAll( final Collection<?> c ) { throw new UnsupportedOperationException(); }
+
+		public void clear() { throw new UnsupportedOperationException(); }
+		public String toString() { return collection.toString(); }
+
+		public <T> T[] toArray( final T[] a ) { return collection.toArray( a ); }
+
+		public Object[] toArray() { return collection.toArray(); }
+
+#if #keys(primitive)
+		public KEY_TYPE[] TO_KEY_ARRAY() { return collection.TO_KEY_ARRAY(); }
+		public KEY_TYPE[] TO_KEY_ARRAY( final KEY_TYPE[] a ) { return collection.TO_KEY_ARRAY( a ); }
+		public KEY_TYPE[] toArray( final KEY_TYPE[] a ) { return collection.toArray( a ); }
+		public boolean rem( final KEY_TYPE k ) { throw new UnsupportedOperationException(); }
+
+		public boolean addAll( final COLLECTION c ) { throw new UnsupportedOperationException(); }
+		public boolean containsAll( final COLLECTION c ) { return collection.containsAll( c ); }
+		public boolean removeAll( final COLLECTION c ) { throw new UnsupportedOperationException(); }
+		public boolean retainAll( final COLLECTION c ) { throw new UnsupportedOperationException(); }
+
+		public boolean add( final KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); }
+		public boolean contains( final Object k ) { return collection.contains( k ); }
+#endif
+	}
+
+
+	/** Returns an unmodifiable collection backed by the specified collection.
+	 *
+	 * @param c the collection to be wrapped in an unmodifiable collection.
+	 * @return an unmodifiable view of the specified collection.
+	 * @see java.util.Collections#unmodifiableCollection(Collection)
+	 */
+	public static KEY_GENERIC COLLECTION KEY_GENERIC unmodifiable( final COLLECTION KEY_GENERIC c ) { return new UnmodifiableCollection KEY_GENERIC( c ); }
+
+	/** A collection wrapper class for iterables. */
+
+	public static class IterableCollection KEY_GENERIC extends ABSTRACT_COLLECTION KEY_GENERIC implements java.io.Serializable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected final KEY_ITERABLE KEY_GENERIC iterable;
+
+		protected IterableCollection( final KEY_ITERABLE KEY_GENERIC iterable ) {
+			if ( iterable == null ) throw new NullPointerException();
+			this.iterable = iterable;
+		}
+
+		public int size() { 
+			int c = 0;
+			final KEY_ITERATOR KEY_GENERIC iterator = iterator();
+			while( iterator.hasNext() ) {
+				iterator.next();
+				c++;
+			}
+			
+			return c;
+		}
+		
+		public boolean isEmpty() { return ! iterable.iterator().hasNext(); } // empty iff the underlying iterable yields no elements
+		public KEY_ITERATOR KEY_GENERIC iterator() { return iterable.iterator(); }
+
+		@Deprecated
+		public KEY_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() { return iterator(); }
+	}
+
+
+	/** Returns an unmodifiable collection backed by the specified iterable.
+	 *
+	 * @param iterable the iterable object to be wrapped in an unmodifiable collection.
+	 * @return an unmodifiable collection view of the specified iterable.
+	 */
+	@SuppressWarnings("unchecked")
+	public static KEY_GENERIC COLLECTION KEY_GENERIC asCollection( final KEY_ITERABLE KEY_GENERIC iterable ) { 
+		if ( iterable instanceof COLLECTION ) return (COLLECTION KEY_GENERIC)iterable;
+		return new IterableCollection KEY_GENERIC( iterable );
+	}
+
+}
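
For int elements this class becomes IntCollections; a brief sketch of the synchronized and unmodifiable views it provides, assuming that specialization:

    import it.unimi.dsi.fastutil.ints.IntArrayList;
    import it.unimi.dsi.fastutil.ints.IntCollection;
    import it.unimi.dsi.fastutil.ints.IntCollections;

    public class IntCollectionsSketch {
        public static void main( String[] args ) {
            final IntCollection base = new IntArrayList( new int[] { 1, 2, 3 } );

            // Thread-safe view: every call synchronizes on the wrapper itself
            // (or on an explicitly supplied lock object).
            final IntCollection sync = IntCollections.synchronize( base );
            sync.add( 4 );

            // Read-only view: mutators throw UnsupportedOperationException.
            final IntCollection frozen = IntCollections.unmodifiable( base );
            System.out.println( frozen.contains( 4 ) ); // true
            // frozen.add( 5 );                         // would throw UnsupportedOperationException
        }
    }
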
diff --git a/drv/Comparator.drv b/drv/Comparator.drv
new file mode 100644
index 0000000..a9efa3c
--- /dev/null
+++ b/drv/Comparator.drv
@@ -0,0 +1,43 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.Comparator;
+
+/** A type-specific {@link Comparator}; provides methods to compare two primitive types both as objects
+ * and as primitive types. 
+ *
+ * <P>Note that <code>fastutil</code> provides a corresponding abstract class that
+ * can be used to implement this interface just by specifying the type-specific
+ * comparator.
+ *
+ * @see Comparator
+ */
+
+public interface KEY_COMPARATOR KEY_GENERIC extends Comparator<KEY_GENERIC_CLASS> {
+
+	/** Compares the given primitive types.
+	 *
+	 * @see java.util.Comparator
+	 * @return A positive integer, zero, or a negative integer if the first
+	 * argument is greater than, equal to, or smaller than, respectively, the
+	 * second one.
+	 */
+
+	public int compare( KEY_TYPE k1, KEY_TYPE k2 );
+}
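
For int keys this interface is IntComparator; as noted above, the corresponding abstract class (AbstractIntComparator in the generated sources) lets an implementation supply only the primitive compare(int, int). A sketch assuming that specialization:

    import it.unimi.dsi.fastutil.ints.AbstractIntComparator;
    import it.unimi.dsi.fastutil.ints.IntArrays;
    import it.unimi.dsi.fastutil.ints.IntComparator;

    public class EvenFirstComparator {
        public static void main( String[] args ) {
            // Order even values before odd ones, then by value; only the
            // primitive compare(int, int) needs to be written.
            final IntComparator evenFirst = new AbstractIntComparator() {
                public int compare( final int a, final int b ) {
                    final int pa = a & 1, pb = b & 1;
                    if ( pa != pb ) return pa - pb;
                    return a < b ? -1 : a == b ? 0 : 1;
                }
            };
            final int[] v = { 5, 2, 3, 8, 1 };
            IntArrays.quickSort( v, evenFirst ); // 2, 8, 1, 3, 5
        }
    }
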
diff --git a/drv/Comparators.drv b/drv/Comparators.drv
new file mode 100644
index 0000000..e469caa
--- /dev/null
+++ b/drv/Comparators.drv
@@ -0,0 +1,70 @@
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+#if #keys(reference)
+import java.util.Comparator;
+#endif
+
+/** A class providing static methods and objects that do useful things with comparators.
+ */
+
+public class COMPARATORS {
+
+	private COMPARATORS() {}
+
+	/** A type-specific comparator mimicking the natural order. */
+
+	@SuppressWarnings({"unchecked","rawtypes"})
+	public static final KEY_COMPARATOR NATURAL_COMPARATOR = new KEY_ABSTRACT_COMPARATOR() {
+			public final int compare( final KEY_TYPE a, final KEY_TYPE b ) {
+#if #keys(primitive)
+				return KEY_CMP( a, b );
+#else
+				return ((Comparable)a).compareTo(b);
+#endif
+			}
+		};
+
+	/** A type-specific comparator mimicking the opposite of the natural order. */
+
+	@SuppressWarnings({"unchecked","rawtypes"})
+	public static final KEY_COMPARATOR OPPOSITE_COMPARATOR = new KEY_ABSTRACT_COMPARATOR() {
+			public final int compare( final KEY_TYPE a, final KEY_TYPE b ) {
+#if #keys(primitive)
+				return - KEY_CMP( a, b );
+#else
+				return ((Comparable)b).compareTo(a);
+#endif
+			}
+		};
+
+	/** Returns a comparator representing the opposite order of the given comparator. 
+	 *
+	 * @param c a comparator.
+	 * @return a comparator representing the opposite order of <code>c</code>.
+	 */
+	public static KEY_GENERIC KEY_COMPARATOR KEY_GENERIC oppositeComparator( final KEY_COMPARATOR KEY_GENERIC c ) {
+		return new KEY_ABSTRACT_COMPARATOR KEY_GENERIC() {
+				private final KEY_COMPARATOR KEY_GENERIC comparator = c;
+				public final int compare( final KEY_GENERIC_TYPE a, final KEY_GENERIC_TYPE b ) {
+					return - comparator.compare( a, b );
+				}
+			};
+	}
+}
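
In the int specialization this class becomes IntComparators; its constants and oppositeComparator() can be passed to any API that accepts an IntComparator. A quick sketch under that assumption:

    import it.unimi.dsi.fastutil.ints.IntArrays;
    import it.unimi.dsi.fastutil.ints.IntComparators;

    public class IntComparatorsSketch {
        public static void main( String[] args ) {
            final int[] v = { 3, 1, 2 };
            // Natural order, then its opposite (i.e., descending order).
            IntArrays.quickSort( v, IntComparators.NATURAL_COMPARATOR );  // 1, 2, 3
            IntArrays.quickSort( v, IntComparators.OPPOSITE_COMPARATOR ); // 3, 2, 1
            // oppositeComparator() reverses any comparator, not just the natural one.
            IntArrays.quickSort( v, IntComparators.oppositeComparator( IntComparators.NATURAL_COMPARATOR ) );
        }
    }
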
diff --git a/drv/Function.drv b/drv/Function.drv
new file mode 100644
index 0000000..76b690f
--- /dev/null
+++ b/drv/Function.drv
@@ -0,0 +1,109 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.Function;
+
+/** A type-specific {@link Function}; provides some additional methods that use polymorphism to avoid (un)boxing.
+ *
+ * <P>Type-specific versions of <code>get()</code>, <code>put()</code> and
+ * <code>remove()</code> cannot rely on <code>null</code> to denote absence of
+ * a key.  Rather, they return a {@linkplain #defaultReturnValue() default
+ * return value}, which is set to 0 cast to the return type (<code>false</code>
+ * for booleans) at creation, but can be changed using the
+ * <code>defaultReturnValue()</code> method. 
+ *
+ * <P>For uniformity reasons, even maps returning objects implement the default
+ * return value (of course, in this case the default return value is
+ * initialized to <code>null</code>).
+ *
+ * <P><strong>Warning:</strong> to fall in line as much as possible with the
+ * {@linkplain java.util.Map standard map interface}, it is strongly suggested
+ * that standard versions of <code>get()</code>, <code>put()</code> and
+ * <code>remove()</code> for maps with primitive-type values <em>return
+ * <code>null</code> to denote missing keys</em> rather than wrap the default
+ * return value in an object (of course, for maps with object keys and values
+ * this is not possible, as there is no type-specific version).
+ *
+ * @see Function
+ */
+
+public interface FUNCTION KEY_VALUE_GENERIC extends Function<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> {
+
+#if #keys(primitive) || #values(primitive)
+
+	/** Adds a pair to the map.
+	 *
+	 * @param key the key.
+	 * @param value the value.
+	 * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
+	 * @see Function#put(Object,Object)
+	 */
+
+	VALUE_GENERIC_TYPE put( KEY_GENERIC_TYPE key, VALUE_GENERIC_TYPE value );
+
+	/** Returns the value to which the given key is mapped.
+	 *
+	 * @param key the key.
+	 * @return the corresponding value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
+	 * @see Function#get(Object)
+	 */
+
+	VALUE_GENERIC_TYPE GET_VALUE( KEY_TYPE key );
+
+	/** Removes the mapping with the given key.
+	 * @param key the key.
+	 * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
+	 * @see Function#remove(Object)
+	 */
+
+	VALUE_GENERIC_TYPE REMOVE_VALUE( KEY_TYPE key );
+#endif
+
+#if #keys(primitive)
+
+	/**
+	 * @see Function#containsKey(Object)
+	 */
+
+	boolean containsKey( KEY_TYPE key );
+
+#endif	 
+
+	/** Sets the default return value. 
+	 *
+	 * This value must be returned by type-specific versions of
+	 * <code>get()</code>, <code>put()</code> and <code>remove()</code> to
+	 * denote that the map does not contain the specified key. It must be
+	 * 0/<code>false</code>/<code>null</code> by default.
+	 *
+	 * @param rv the new default return value.
+	 * @see #defaultReturnValue()
+	 */
+
+	void defaultReturnValue( VALUE_GENERIC_TYPE rv );
+
+	 
+	/** Gets the default return value.
+	 *
+	 * @return the current default return value.
+	 */
+
+	VALUE_GENERIC_TYPE defaultReturnValue();
+
+}
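
Because a primitive-valued function cannot return null for a missing key, the default return value is what callers actually observe; Int2IntOpenHashMap, which implements the generated Int2IntFunction, illustrates the behaviour. A sketch assuming that specialization:

    import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap;

    public class DefaultReturnValueSketch {
        public static void main( String[] args ) {
            final Int2IntOpenHashMap counts = new Int2IntOpenHashMap();
            counts.put( 7, 3 );

            // Missing keys yield the default return value, which starts at 0...
            System.out.println( counts.get( 42 ) ); // 0

            // ...but it can be changed to an out-of-band marker.
            counts.defaultReturnValue( -1 );
            System.out.println( counts.get( 42 ) ); // -1

            // The object-based view still returns null for missing keys, as suggested above.
            System.out.println( counts.get( Integer.valueOf( 42 ) ) ); // null
        }
    }
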
diff --git a/drv/Functions.drv b/drv/Functions.drv
new file mode 100644
index 0000000..75940b2
--- /dev/null
+++ b/drv/Functions.drv
@@ -0,0 +1,243 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+/** A class providing static methods and objects that do useful things with type-specific functions.
+ *
+ * @see it.unimi.dsi.fastutil.Function
+ * @see java.util.Collections
+ */
+
+public class FUNCTIONS {
+
+	private FUNCTIONS() {}
+
+
+	/** An immutable class representing an empty type-specific function.
+	 *
+	 * <P>This class may be useful to implement your own in case you subclass
+	 * a type-specific function.
+	 */
+
+	public static class EmptyFunction KEY_VALUE_GENERIC extends ABSTRACT_FUNCTION KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable {
+	
+		private static final long serialVersionUID = -7046029254386353129L;
+	
+		protected EmptyFunction() {}
+	
+		public VALUE_GENERIC_TYPE GET_VALUE( final KEY_TYPE k ) { return VALUE_NULL; }
+
+		public boolean containsKey( final KEY_TYPE k ) { return false; }
+
+		public VALUE_GENERIC_TYPE defaultReturnValue()  { return VALUE_NULL; }
+		public void defaultReturnValue( final VALUE_GENERIC_TYPE defRetValue )  { throw new UnsupportedOperationException(); }
+	
+#if #keys(primitive)
+		public VALUE_GENERIC_CLASS get( final Object k ) { return null; }
+#endif
+
+		public int size() { return 0; }
+		public void clear() {}
+	
+		private Object readResolve() { return EMPTY_FUNCTION; }
+
+		public Object clone() { return EMPTY_FUNCTION; }
+	}
+
+
+
+	/** An empty type-specific function (immutable). It is serializable and cloneable. */
+	 
+	@SuppressWarnings("rawtypes")
+	public static final EmptyFunction EMPTY_FUNCTION = new EmptyFunction();
+
+
+	/** An immutable class representing a type-specific singleton function.	 
+	 *
+	 * <P>This class may be useful to implement your own in case you subclass
+	 * a type-specific function.
+	 */
+
+	public static class Singleton KEY_VALUE_GENERIC extends ABSTRACT_FUNCTION KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable {
+	
+		private static final long serialVersionUID = -7046029254386353129L;
+	
+		protected final KEY_GENERIC_TYPE key;
+		protected final VALUE_GENERIC_TYPE value;
+
+		protected Singleton( final KEY_GENERIC_TYPE key, final VALUE_GENERIC_TYPE value ) {
+			this.key = key;
+			this.value = value;
+		}
+	
+		public boolean containsKey( final KEY_TYPE k ) { return KEY_EQUALS( key, k ); }
+
+		public VALUE_GENERIC_TYPE GET_VALUE( final KEY_TYPE k ) { if ( KEY_EQUALS( key, k ) ) return value; return defRetValue; }
+
+		public int size() { return 1; }
+	
+		public Object clone() { return this; }
+	}
+
+	/** Returns a type-specific immutable function containing only the specified pair. The returned function is serializable and cloneable.
+	 *
+	 * <P>Note that albeit the returned function is immutable, its default return value may be changed.
+	 *
+	 * @param key the only key of the returned function.
+	 * @param value the only value of the returned function.
+	 * @return a type-specific immutable function containing just the pair <code>&lt;key,value&gt;</code>.
+	 */
+
+	public static KEY_VALUE_GENERIC FUNCTION KEY_VALUE_GENERIC singleton( final KEY_GENERIC_TYPE key, VALUE_GENERIC_TYPE value ) {
+		return new Singleton KEY_VALUE_GENERIC( key, value );
+	}
+
+#if #keys(primitive) || #values(primitive)
+
+	/** Returns a type-specific immutable function containing only the specified pair. The returned function is serializable and cloneable.
+	 *
+	 * <P>Note that albeit the returned function is immutable, its default return value may be changed.
+	 *
+	 * @param key the only key of the returned function.
+	 * @param value the only value of the returned function.
+	 * @return a type-specific immutable function containing just the pair <code>&lt;key,value&gt;</code>.
+	 */
+
+	public static KEY_VALUE_GENERIC FUNCTION KEY_VALUE_GENERIC singleton( final KEY_GENERIC_CLASS key, final VALUE_GENERIC_CLASS value ) {
+		return new Singleton KEY_VALUE_GENERIC( KEY_CLASS2TYPE( key ), VALUE_CLASS2TYPE( value ) );
+	}
+
+#endif
+
+
+	/** A synchronized wrapper class for functions. */
+
+	public static class SynchronizedFunction KEY_VALUE_GENERIC extends ABSTRACT_FUNCTION KEY_VALUE_GENERIC implements java.io.Serializable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected final FUNCTION KEY_VALUE_GENERIC function;
+		protected final Object sync;
+
+		protected SynchronizedFunction( final FUNCTION KEY_VALUE_GENERIC f, final Object sync ) {
+			if ( f == null ) throw new NullPointerException();
+			this.function = f;
+			this.sync = sync;
+		}
+
+		protected SynchronizedFunction( final FUNCTION KEY_VALUE_GENERIC f ) {
+			if ( f == null ) throw new NullPointerException();
+			this.function = f;
+			this.sync = this;
+		}
+
+		public int size() { synchronized( sync ) { return function.size(); } }
+		public boolean containsKey( final KEY_TYPE k ) { synchronized( sync ) { return function.containsKey( k ); } }
+
+		public VALUE_GENERIC_TYPE defaultReturnValue()  { synchronized( sync ) { return function.defaultReturnValue(); } }
+		public void defaultReturnValue( final VALUE_GENERIC_TYPE defRetValue )  { synchronized( sync ) { function.defaultReturnValue( defRetValue ); } }
+
+		public VALUE_GENERIC_TYPE put( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) { synchronized( sync ) { return function.put( k, v ); } }
+
+		public void clear() { synchronized( sync ) { function.clear(); } }
+		public String toString() { synchronized( sync ) { return function.toString(); } }
+
+#if #keys(primitive) || #values(primitive)
+		public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS k, final VALUE_GENERIC_CLASS v ) { synchronized( sync ) { return function.put( k, v ); } }
+		public VALUE_GENERIC_CLASS get( final Object k ) { synchronized( sync ) { return function.get( k ); } }
+		public VALUE_GENERIC_CLASS remove( final Object k ) { synchronized( sync ) { return function.remove( k ); } }
+#endif
+
+#if #keys(primitive)
+		public VALUE_GENERIC_TYPE remove( final KEY_GENERIC_TYPE k ) { synchronized( sync ) { return function.remove( k ); } }
+		public VALUE_GENERIC_TYPE get( final KEY_GENERIC_TYPE k ) { synchronized( sync ) { return function.get( k ); } }
+		public boolean containsKey( final Object ok ) { synchronized( sync ) { return function.containsKey( ok ); } }
+#endif
+
+#if #keys(reference)
+		public VALUE_GENERIC_TYPE REMOVE_VALUE( final Object k ) { synchronized( sync ) { return function.REMOVE_VALUE( k ); } }
+		public VALUE_GENERIC_TYPE GET_VALUE( final Object k ) { synchronized( sync ) { return function.GET_VALUE( k ); } }
+#endif
+
+	}
+
+	/** Returns a synchronized type-specific function backed by the given type-specific function.
+	 *
+	 * @param f the function to be wrapped in a synchronized function.
+	 * @return a synchronized view of the specified function.
+	 * @see java.util.Collections#synchronizedMap(java.util.Map)
+	 */
+	public static KEY_VALUE_GENERIC FUNCTION KEY_VALUE_GENERIC synchronize( final FUNCTION KEY_VALUE_GENERIC f ) { return new SynchronizedFunction KEY_VALUE_GENERIC( f ); }
+
+	/** Returns a synchronized type-specific function backed by the given type-specific function, using an assigned object to synchronize.
+	 *
+	 * @param f the function to be wrapped in a synchronized function.
+	 * @param sync an object that will be used to synchronize the access to the function.
+	 * @return a synchronized view of the specified function.
+	 * @see java.util.Collections#synchronizedMap(java.util.Map)
+	 */
+
+	public static KEY_VALUE_GENERIC FUNCTION KEY_VALUE_GENERIC synchronize( final FUNCTION KEY_VALUE_GENERIC f, final Object sync ) { return new SynchronizedFunction KEY_VALUE_GENERIC( f, sync ); }
+
+
+
+	/** An unmodifiable wrapper class for functions. */
+
+	public static class UnmodifiableFunction KEY_VALUE_GENERIC extends ABSTRACT_FUNCTION KEY_VALUE_GENERIC implements java.io.Serializable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected final FUNCTION KEY_VALUE_GENERIC function;
+
+		protected UnmodifiableFunction( final FUNCTION KEY_VALUE_GENERIC f ) {
+			if ( f == null ) throw new NullPointerException();
+			this.function = f;
+		}
+
+		public int size() { return function.size(); }
+		public boolean containsKey( final KEY_TYPE k ) { return function.containsKey( k ); }
+
+		public VALUE_GENERIC_TYPE defaultReturnValue()  { return function.defaultReturnValue(); }
+		public void defaultReturnValue( final VALUE_GENERIC_TYPE defRetValue )  { throw new UnsupportedOperationException(); }
+
+		public VALUE_GENERIC_TYPE put( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); }
+
+		public void clear() { throw new UnsupportedOperationException(); }
+		public String toString() { return function.toString(); }
+
+#if #keys(primitive)
+		public VALUE_GENERIC_TYPE remove( final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+		public VALUE_GENERIC_TYPE get( final KEY_GENERIC_TYPE k ) { return function.get( k ); }
+		public boolean containsKey( final Object ok ) { return function.containsKey( ok ); }
+#endif
+
+#if #keys(reference) || #values(reference)
+		public VALUE_GENERIC_TYPE REMOVE_VALUE( final Object k ) { throw new UnsupportedOperationException(); }
+		public VALUE_GENERIC_TYPE GET_VALUE( final Object k ) { return function.GET_VALUE( k ); }
+#endif
+
+	}
+
+	/** Returns an unmodifiable type-specific function backed by the given type-specific function.
+	 *
+	 * @param f the function to be wrapped in an unmodifiable function.
+	 * @return an unmodifiable view of the specified function.
+	 * @see java.util.Collections#unmodifiableMap(java.util.Map)
+	 */
+	public static KEY_VALUE_GENERIC FUNCTION KEY_VALUE_GENERIC unmodifiable( final FUNCTION KEY_VALUE_GENERIC f ) { return new UnmodifiableFunction KEY_VALUE_GENERIC( f ); }
+}
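
For int keys and values this class becomes Int2IntFunctions; a short sketch of singleton() and synchronize(), assuming that specialization:

    import it.unimi.dsi.fastutil.ints.Int2IntFunction;
    import it.unimi.dsi.fastutil.ints.Int2IntFunctions;

    public class Int2IntFunctionsSketch {
        public static void main( String[] args ) {
            // An immutable single-pair function; only its default return value may change.
            final Int2IntFunction one = Int2IntFunctions.singleton( 7, 3 );
            System.out.println( one.get( 7 ) ); // 3
            System.out.println( one.get( 8 ) ); // 0, the default return value

            // A view whose methods all synchronize on the wrapper (or on a supplied lock).
            final Int2IntFunction safe = Int2IntFunctions.synchronize( one );
            System.out.println( safe.containsKey( 7 ) ); // true
        }
    }
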
diff --git a/drv/Hash.drv b/drv/Hash.drv
new file mode 100644
index 0000000..e6a777f
--- /dev/null
+++ b/drv/Hash.drv
@@ -0,0 +1,52 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.Hash;
+
+/** A type-specific {@link Hash} interface.
+ *
+ * @see Hash
+ */
+
+public interface HASH {
+
+	/** A type-specific hash strategy.
+	 *
+	 * @see it.unimi.dsi.fastutil.Hash.Strategy
+	 */
+
+	public interface Strategy {
+
+		/** Returns the hash code of the specified element with respect to this hash strategy.
+		 *
+		 * @param e an element.
+		 * @return the hash code of the given element with respect to this hash strategy.
+		 */
+
+		public int hashCode( KEY_TYPE e );
+
+		/** Returns true if the given elements are equal with respect to this hash strategy.
+		 *
+		 * @param a an element.
+		 * @param b another element.
+		 * @return true if the two specified elements are equal with respect to this hash strategy.
+		 */
+		public boolean equals( KEY_TYPE a, KEY_TYPE b );
+	}
+}
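
A type-specific strategy plugs into the custom hash containers so that hashing and equality can differ from the plain value semantics; for int keys that is IntHash.Strategy used with, e.g., IntOpenCustomHashSet (the custom-strategy set generated for int keys; treat the class name as an assumption). A sketch that considers two ints equal when they share the last decimal digit:

    import it.unimi.dsi.fastutil.ints.IntHash;
    import it.unimi.dsi.fastutil.ints.IntOpenCustomHashSet;

    public class LastDigitStrategySketch {
        public static void main( String[] args ) {
            final IntHash.Strategy lastDigit = new IntHash.Strategy() {
                public int hashCode( final int e ) { return Math.abs( e % 10 ); }
                public boolean equals( final int a, final int b ) { return a % 10 == b % 10; }
            };
            final IntOpenCustomHashSet set = new IntOpenCustomHashSet( lastDigit );
            set.add( 12 );
            set.add( 42 );                           // same last digit: not added again
            System.out.println( set.size() );        // 1
            System.out.println( set.contains( 2 ) ); // true under this strategy
        }
    }
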
diff --git a/drv/HeapIndirectDoublePriorityQueue.drv b/drv/HeapIndirectDoublePriorityQueue.drv
new file mode 100644
index 0000000..3b30020
--- /dev/null
+++ b/drv/HeapIndirectDoublePriorityQueue.drv
@@ -0,0 +1,654 @@
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+#if #keyclass(Object)
+import java.util.Comparator;
+import it.unimi.dsi.fastutil.IndirectDoublePriorityQueue;
+#endif
+
+
+/** A type-specific heap-based indirect double priority queue.
+ *
+ * <P>Instances of this class are based on two indirect
+ * heap-based queues. The queues are enlarged as needed, but they are never
+ * shrunk. Use the {@link #trim()} method to reduce their size, if necessary.
+ *
+ * <P>Either comparator may be <code>null</code>, indicating that natural comparison should take place. Of course,
+ * it makes little sense to have them equal.
+ */
+
+public class HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUE KEY_GENERIC extends HEAP_INDIRECT_PRIORITY_QUEUE KEY_GENERIC implements INDIRECT_DOUBLE_PRIORITY_QUEUE KEY_GENERIC {
+
+	/** The secondary indirect queue. */
+	protected HEAP_INDIRECT_PRIORITY_QUEUE KEY_GENERIC secondaryQueue;
+
+	/** Creates a new empty queue with a given capacity.
+	 *
+	 * @param refArray the reference array.
+	 * @param capacity the initial capacity of this queue.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 * @param d the secondary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity, KEY_COMPARATOR KEY_SUPER_GENERIC c, KEY_COMPARATOR KEY_SUPER_GENERIC d ) {
+		super( refArray, capacity, c );
+		secondaryQueue = new HEAP_INDIRECT_PRIORITY_QUEUE KEY_GENERIC( refArray, capacity, d );
+	}
+
+
+	/** Creates a new empty queue with a given capacity.
+	 *
+	 * <P>This constructor uses as secondary comparator the opposite order of <code>c</code>.
+	 *
+	 * @param refArray the reference array.
+	 * @param capacity the initial capacity of this queue.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	@SuppressWarnings("unchecked")
+	public HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity, KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		super( refArray, capacity, c );
+		secondaryQueue = new HEAP_INDIRECT_PRIORITY_QUEUE KEY_GENERIC( refArray, capacity, c == null ? COMPARATORS.OPPOSITE_COMPARATOR : COMPARATORS.oppositeComparator( c ) );
+	}
+
+
+	/** Creates a new empty queue with a given capacity and natural order as primary comparator.
+	 *
+	 * <P>This constructor uses as secondary comparator the opposite of the natural order.
+	 *
+	 * @param refArray the reference array.
+	 * @param capacity the initial capacity of this queue.
+	 */
+	public HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity ) {
+		this( refArray, capacity, null );
+	}
+
+
+	/** Creates a new empty queue with capacity equal to the length of the reference array.
+	 *
+	 * @param refArray the reference array.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 * @param d the secondary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, KEY_COMPARATOR KEY_SUPER_GENERIC c, KEY_COMPARATOR KEY_SUPER_GENERIC d ) {
+		this( refArray, refArray.length, c, d );
+	}
+
+	/** Creates a new empty queue with capacity equal to the length of the reference array.
+	 *
+	 * <P>This constructor uses as secondary comparator the opposite order of <code>c</code>.
+	 *
+	 * @param refArray the reference array.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( refArray, refArray.length, c );
+	}
+
+	/** Creates a new empty queue with capacity equal to the length of the reference array and natural order as primary comparator.
+	 *
+	 * <P>This constructor uses as secondary comparator the opposite of the natural order.
+	 *
+	 * @param refArray the reference array.
+	 */
+	public HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray ) {
+		this( refArray, refArray.length, null );
+	}
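To make the constructors above concrete, here is a minimal usage sketch. It assumes the int specialization generated from this driver, it.unimi.dsi.fastutil.ints.IntHeapIndirectDoublePriorityQueue, and the wrapper class name is invented for illustration only. The queue holds indices into the reference array: first() reports the index of the smallest key and, with the default secondary comparator, secondaryFirst() reports the index of the largest one.

    import it.unimi.dsi.fastutil.ints.IntHeapIndirectDoublePriorityQueue;

    public class DoubleQueueSketch {
        public static void main( String[] args ) {
            // The queue stores *indices* into this reference array, not the keys themselves.
            final int[] refArray = { 4, 1, 3, 2 };
            // Primary order: natural; the secondary order defaults to its opposite.
            final IntHeapIndirectDoublePriorityQueue q = new IntHeapIndirectDoublePriorityQueue( refArray );
            q.enqueue( 0 );
            q.enqueue( 1 );
            q.enqueue( 3 );
            System.out.println( q.first() );          // 1: refArray[ 1 ] == 1 is the smallest enqueued key
            System.out.println( q.secondaryFirst() ); // 0: refArray[ 0 ] == 4 is the largest enqueued key
        }
    }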
+
+
+	/** Wraps a given array in a queue using the given comparators.
+	 *
+	 * <P>The queue created by this constructor will be backed by the given array.
+	 * The first <code>size</code> elements of the array will be rearranged so as to form a heap;
+	 * moreover, the array will be cloned and wrapped in a secondary queue (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param size the number of elements to be included in the queue.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 * @param d the secondary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size, final KEY_COMPARATOR KEY_SUPER_GENERIC c, final KEY_COMPARATOR KEY_SUPER_GENERIC d ) {
+		super( refArray, a, size, c );
+		this.secondaryQueue = new HEAP_INDIRECT_PRIORITY_QUEUE KEY_GENERIC( refArray, a.clone(), size, d );
+	}
+
+	/** Wraps a given array in a queue using the given comparators.
+	 *
+	 * <P>The queue created by this constructor will be backed by the given array.
+	 * The elements of the array will be rearranged so as to form a heap;
+	 * moreover, the array will be cloned and wrapped in a secondary queue (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 * @param d the secondary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, final KEY_COMPARATOR KEY_SUPER_GENERIC c, final KEY_COMPARATOR KEY_SUPER_GENERIC d ) {
+		this( refArray, a, a.length, c, d );
+	}
+
+
+	/** Wraps a given array in a queue using a given comparator.
+	 *
+	 * <P>The queue created by this constructor will be backed by the given array.
+	 * The first <code>size</code> elements of the array will be rearranged so as to form a heap;
+	 * moreover, the array will be cloned and wrapped in a secondary queue (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * <P>This constructor uses the opposite order of <code>c</code> as the secondary comparator.
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param size the number of elements to be included in the queue.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	@SuppressWarnings("unchecked")
+	public HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( refArray, a, size, c, c == null ? COMPARATORS.OPPOSITE_COMPARATOR : COMPARATORS.oppositeComparator( c ) );
+	}
+
+
+	/** Wraps a given array in a queue using a given comparator.
+	 *
+	 * <P>The queue created by this constructor will be backed by the given array.
+	 * The elements of the array will be rearranged so as to form a heap;
+	 * moreover, the array will be cloned and wrapped in a secondary queue (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * <P>This constructor uses the opposite order of <code>c</code> as the secondary comparator.
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( refArray, a, a.length, c );
+	}
+
+	/** Wraps a given array in a queue using the natural order.
+	 *
+	 * <P>The queue created by this constructor will be backed by the given array.
+	 * The first <code>size</code> elements of the array will be rearranged so as to form a heap;
+	 * moreover, the array will be cloned and wrapped in a secondary queue (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * <P>This constructor uses the opposite of the natural order as the secondary comparator.
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param size the number of elements to be included in the queue.
+	 */
+	public HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size ) {
+		this( refArray, a, size, null );
+	}
+
+
+	/** Wraps a given array in a queue using the natural order.
+	 *
+	 * <P>The queue created by this constructor will be backed by the given array.
+	 * The elements of the array will be rearranged so as to form a heap;
+	 * moreover, the array will be cloned and wrapped in a secondary queue (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * <P>This constructor uses the opposite of the natural order as the secondary comparator.
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 */
+	public HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a ) {
+		this( refArray, a, a.length );
+	}
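The wrapping constructors above admit a similar sketch with both comparators given explicitly (same assumptions as in the previous example, with an invented class name; IntComparators is the type-specific comparator container of the int package). The index array is heapified in place for the primary order, while the secondary queue works on a clone of it.

    import it.unimi.dsi.fastutil.ints.IntComparators;
    import it.unimi.dsi.fastutil.ints.IntHeapIndirectDoublePriorityQueue;

    public class DoubleQueueWrapSketch {
        public static void main( String[] args ) {
            final int[] refArray = { 4, 1, 3, 2 };
            // Indices into refArray; this array is rearranged in place into the primary heap,
            // and a clone of it backs the secondary heap.
            final int[] indices = { 0, 1, 2, 3 };
            final IntHeapIndirectDoublePriorityQueue q = new IntHeapIndirectDoublePriorityQueue( refArray, indices,
                IntComparators.NATURAL_COMPARATOR, IntComparators.OPPOSITE_COMPARATOR );
            System.out.println( q.first() );          // 1: index of the smallest key
            System.out.println( q.secondaryFirst() ); // 0: index of the largest key
        }
    }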
+
+
+	public void enqueue( int x ) {
+		secondaryQueue.enqueue( x );
+		super.enqueue( x );
+	}
+
+	public int dequeue() {
+		final int result = super.dequeue();
+		secondaryQueue.remove( result );
+		return result;
+	}
+
+	public int secondaryFirst() {
+		return secondaryQueue.first();
+	}
+
+	public int secondaryLast() { throw new UnsupportedOperationException(); }
+
+	public void changed() {
+		secondaryQueue.changed( heap[ 0 ] );
+		super.changed();
+	}
+
+	public void changed( final int index ) {
+		secondaryQueue.changed( index );
+		super.changed( index );
+	}
+
+
+	public void allChanged() {
+		secondaryQueue.allChanged();
+		super.allChanged();
+	}
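The queue cannot observe writes to the reference array, so callers must report them through changed(), changed(int) or allChanged(), which the overrides above forward to both underlying heaps. A minimal sketch of that protocol, under the same assumptions as the earlier examples:

    import it.unimi.dsi.fastutil.ints.IntHeapIndirectDoublePriorityQueue;

    public class ChangeNotificationSketch {
        public static void main( String[] args ) {
            final int[] refArray = { 4, 1, 3, 2 };
            final IntHeapIndirectDoublePriorityQueue q = new IntHeapIndirectDoublePriorityQueue( refArray );
            for ( int i = 0; i < refArray.length; i++ ) q.enqueue( i );

            refArray[ q.first() ] = 10; // the key of the current primary minimum changes...
            q.changed();                // ...so both heaps must be notified and fixed up.

            refArray[ 3 ] = -5;         // the key of an arbitrary enqueued index changes...
            q.changed( 3 );             // ...so the queue is notified of that specific index.

            java.util.Arrays.fill( refArray, 0 );
            q.allChanged();             // after bulk changes, both heaps are rebuilt.

            System.out.println( q.first() );
        }
    }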
+
+	public void clear() { 
+		super.clear();
+		secondaryQueue.clear();
+	}
+
+	public boolean remove( final int index ) {
+		secondaryQueue.remove( index );
+		return super.remove( index );
+	}
+
+	public int secondaryFront( final int[] a ) {
+		return secondaryQueue.front( a );
+	}
+
+	/** Trims the underlying queues so they have exactly {@link #size()} elements.
+	 */
+
+	public void trim() {
+		super.trim();
+		secondaryQueue.trim();
+	}
+
+	/** Returns the secondary comparator of this queue.
+	 *
+	 * @return the secondary comparator of this queue.
+	 * @see #secondaryFirst()
+	 */
+	public KEY_COMPARATOR KEY_SUPER_GENERIC secondaryComparator() { return secondaryQueue.comparator(); }
+
+
+#ifdef TEST
+
+	/** The original class, now just used for testing. */
+
+	private static class TestQueue {
+
+		/** The reference array */
+		private KEY_TYPE refArray[];
+		/** Its length */
+		private int N;
+		/** The number of elements in the heaps */
+		private int n;
+		/** The two comparators */
+		private KEY_COMPARATOR primaryComp, secondaryComp;
+		/** Two indirect heaps are used, called <code>primary</code> and <code>secondary</code>. Each of them contains
+			<code>n</code> of the indices 0, 1, ..., <code>N</code>-1, arranged so that the corresponding
+			objects are heap-ordered with respect to the respective comparator.
+			We also need an array <code>inSec[]</code> such that <code>inSec[k]</code> is the position in <code>secondary</code>
+			containing <code>k</code>.
+		*/
+		private int primary[], secondary[], inSec[];
+
+		/** Builds a double indirect priority queue.
+		 *  @param refArray The reference array.
+		 *  @param primaryComp The primary comparator.
+		 *  @param secondaryComp The secondary comparator.
+		 */
+		public TestQueue( KEY_TYPE refArray[], KEY_COMPARATOR primaryComp, KEY_COMPARATOR secondaryComp ) {
+			this.refArray = refArray;
+			this.N = refArray.length;
+			assert this.N != 0;
+			this.n = 0;
+			this.primaryComp = primaryComp;
+			this.secondaryComp = secondaryComp;
+			this.primary = new int[N];
+			this.secondary = new int[N];
+			this.inSec = new int[N];
+			java.util.Arrays.fill( inSec, -1 );
+		}
+
+		/** Adds an index to the queue. Notice that the index must not already be present in the queue.
+		 *  @param i The index to be added
+		 */
+		public void add( int i ) {
+			if ( i < 0 || i >= refArray.length ) throw new IndexOutOfBoundsException();
+			if ( inSec[ i ] >= 0 ) throw new IllegalArgumentException();
+			primary[n] = i;
+			secondary[n] = i; inSec[i] = n;
+			n++;
+			swimPrimary( n-1 );
+			swimSecondary( n-1 );
+		}
+
+		/** Heapify the primary heap.
+		 *  @param i The heap position from which to heapify.
+		 */
+		private void heapifyPrimary( int i ) {
+			int dep = primary[i];
+			int child;
+
+			while ( ( child = 2*i+1 ) < n ) {
+				if ( child+1 < n && primaryComp.compare( refArray[primary[child+1]], refArray[primary[child]] ) < 0 ) child++;
+				if ( primaryComp.compare( refArray[dep], refArray[primary[child]] ) <= 0 ) break;
+				primary[i] = primary[child];
+				i = child;
+			}
+			primary[i] = dep;
+		}
+
+		/** Heapify the secondary heap.
+		 *  @param i The heap position from which to heapify.
+		 */
+		private void heapifySecondary( int i ) {
+			int dep = secondary[i];
+			int child;
+
+			while ( ( child = 2*i+1 ) < n ) {
+				if ( child+1 < n && secondaryComp.compare( refArray[secondary[child+1]], refArray[secondary[child]] ) < 0 ) child++;
+				if ( secondaryComp.compare( refArray[dep], refArray[secondary[child]] ) <= 0 ) break;
+				secondary[i] = secondary[child]; inSec[secondary[i]] = i;
+				i = child;
+			}
+			secondary[i] = dep; inSec[secondary[i]] = i;
+		}
+
+		/** Swim and heapify the primary heap.
+		 *  @param i The index to be moved.
+		 */
+		private void swimPrimary( int i ) {
+			int dep = primary[i];
+			int parent;
+
+			while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) {
+				if ( primaryComp.compare( refArray[primary[parent]], refArray[dep] ) <= 0 ) break;
+				primary[i] = primary[parent];
+				i = parent;
+			}
+			primary[i] = dep;
+			heapifyPrimary( i );
+		}
+
+		/** Swim and heapify the secondary heap.
+		 *  @param i The index to be moved.
+		 */
+		private void swimSecondary( int i ) {
+			int dep = secondary[i];
+			int parent;
+
+			while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) {
+				if ( secondaryComp.compare( refArray[secondary[parent]], refArray[dep] ) <= 0 ) break;
+				secondary[i] = secondary[parent]; inSec[secondary[i]] = i;
+				i = parent;
+			}
+			secondary[i] = dep; inSec[secondary[i]] = i;
+			heapifySecondary( i );
+		}
+
+		/** Returns the minimum element with respect to the primary comparator.
+			@return the minimum element.
+		*/
+		public int top() {
+			if ( n == 0 ) throw new java.util.NoSuchElementException();
+			return primary[0];
+		}
+
+		/** Returns the minimum element with respect to the secondary comparator.
+			@return the minimum element.
+		*/
+		public int secTop() {
+			if ( n == 0 ) throw new java.util.NoSuchElementException();
+			return secondary[0];
+		}
+
+		/** Removes the minimum element with respect to the primary comparator.
+		 */
+		public void remove() {
+			if ( n == 0 ) throw new java.util.NoSuchElementException();
+			int result = primary[0];
+			int ins = inSec[result];
+			inSec[ result ] = -1;
+			// Copy a leaf 
+			primary[0] = primary[n-1];
+			if ( ins == n-1 ) {
+				n--;
+				heapifyPrimary( 0 );	
+				return;
+			}
+			secondary[ins] = secondary[n-1]; 
+			inSec[secondary[ins]] = ins;
+			// Heapify
+			n--;
+			heapifyPrimary( 0 );
+			swimSecondary( ins );
+		}
+
+		public void clear() {
+			while( size() != 0 ) remove();
+		}
+
+		/** Signals that the minimum element with respect to the comparator has changed.
+		 */
+		public void change() {
+			int ins = inSec[primary[0]];
+			heapifyPrimary( 0 );
+			swimSecondary( ins );
+		}
+
+		/** Returns the number of elements in the queue.
+		 *  @return the size of the queue
+		 */
+		public int size() {
+			return n;
+		}
+
+
+
+		public String toString() {
+			String s = "[";
+			for ( int i = 0; i < n; i++ )
+				s += refArray[primary[i]]+", ";
+			return s+ "]";
+		}
+	}
+
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif #keyclass(Object)
+		return Integer.toBinaryString( r.nextInt() );
+#else 
+		return new java.io.Serializable() {};
+#endif
+	}
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition p = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, p ).toString();
+	}
+
+	private static void speedTest( int n, boolean comp ) {
+		System.out.println( "There are presently no speed tests for this class." );
+	}
+
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	private static boolean heapEqual( int[] a, int[] b, int sizea, int sizeb ) {
+		if ( sizea != sizeb ) return false;
+		while( sizea-- != 0 ) if ( a[sizea] != b[sizea] ) return false;
+		return true;
+	}
+
+	private static boolean invEqual( int inva[], int[] invb ) {
+		int i = inva.length;
+		while( i-- != 0 ) if ( inva[ i ] != invb[ i ] ) return false;
+		return true;
+	}
+
+
+
+	protected static void test( int n ) {
+		long ms;
+		Exception mThrowsIllegal, tThrowsIllegal, mThrowsOutOfBounds, tThrowsOutOfBounds, mThrowsNoElement, tThrowsNoElement;
+		int rm = 0, rt = 0;
+		KEY_TYPE[] refArray = new KEY_TYPE[ n ];
+
+		for( int i = 0; i < n; i++ ) refArray[ i ] = genKey();
+		  
+		HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUE m = new HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUE( refArray );
+		TestQueue t = new TestQueue( refArray, COMPARATORS.NATURAL_COMPARATOR, COMPARATORS.OPPOSITE_COMPARATOR );
+
+		/* We add pairs to t. */
+		for( int i = 0; i < n / 2;  i++ ) {
+			t.add( i );
+			m.enqueue( i );
+		}
+		
+		ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ in primary heap after creation (" + m + ", " + t + ")" );
+		ensure( heapEqual( m.secondaryQueue.heap, t.secondary, m.size(), t.size() ), "Error (" + seed + "): m and t differ in secondary heap after creation (" + m + ", " + t + ")" );
+		ensure( invEqual( m.secondaryQueue.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after creation (" + java.util.Arrays.toString( m.secondaryQueue.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" );
+
+		/* Now we add and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<2*n;  i++ ) {
+			if ( r.nextDouble() < 0.01 ) {
+				t.clear();
+				m.clear();
+				for( int j = 0; j < n / 2;  j++ ) {
+					t.add( j );
+					m.enqueue( j );
+				}
+			}
+
+			int T = r.nextInt( 2 * n );
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			try {
+				m.enqueue( T );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = e; }
+
+			try {
+				t.add( T );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): enqueue() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): enqueue() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+
+			ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ in primary heap after enqueue (" + m + ", " + t + ")" );
+			ensure( heapEqual( m.secondaryQueue.heap, t.secondary, m.size(), t.size() ), "Error (" + seed + "): m and t differ in secondary heap after enqueue (" + m + ", " + t + ")" );
+			ensure( invEqual( m.secondaryQueue.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after enqueue (" + java.util.Arrays.toString( m.secondaryQueue.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" );
+			
+			if ( m.size() != 0 ) {
+				ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after enqueue (" + m.first() + ", " + t.top() + ")");
+				ensure( m.secondaryFirst() == t.secTop(), "Error (" + seed + "): m and t differ in secondary first element after enqueue (" + m.secondaryFirst() + ", " + t.secTop() + ")");
+			}
+
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			try {
+				rm = m.dequeue();
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = e; }
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = e; }
+
+			try {
+				rt = t.top();
+				t.remove();
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = e; }
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): dequeue() divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): dequeue() divergence in IllegalArgumentException  (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ), "Error (" + seed + "): dequeue() divergence in java.util.NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( rt == rm , "Error (" + seed + "): divergence in dequeue() between t and m (" + rt + ", " + rm + ")" );
+
+			ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ in primary heap after dequeue (" + m + ", " + t + ")" );
+			ensure( heapEqual( m.secondaryQueue.heap, t.secondary, m.size(), t.size() ), "Error (" + seed + "): m and t differ in secondary heap after dequeue (" + m + ", " + t + ")" );
+			ensure( invEqual( m.secondaryQueue.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after dequeue (" + java.util.Arrays.toString( m.secondaryQueue.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" );
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			if ( m.size() != 0 ) {
+				refArray[ m.first() ] = genKey();
+				
+				m.changed();
+				t.change();
+				
+				ensure( m.size() == t.size(), "Error (" + seed + "): m and t differ in size after change (" + m.size() + ", " + t.size() + ")");
+				
+				ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after change (" + m.first() + ", " + t.top() + ")");
+				ensure( m.secondaryFirst() == t.secTop(), "Error (" + seed + "): m and t differ in secondary first element after change (" + m.secondaryFirst() + ", " + t.secTop() + ")");
+			}
+
+		}
+
+
+		/* Now we check that m actually holds the same data. */
+		  
+		m.clear();
+		ensure( m.isEmpty(), "Error (" + seed + "): m is not empty after clear()" );
+
+		System.out.println("Test OK");
+	}
+
+
+
+	public static void main( String args[] ) {
+		int n  = Integer.parseInt(args[1]);
+		if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) );
+		  
+
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+
+#endif
+
+}
diff --git a/drv/HeapIndirectPriorityQueue.drv b/drv/HeapIndirectPriorityQueue.drv
new file mode 100644
index 0000000..3825ccb
--- /dev/null
+++ b/drv/HeapIndirectPriorityQueue.drv
@@ -0,0 +1,674 @@
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+#if #keyclass(Object)
+import java.util.Comparator;
+#endif
+
+import it.unimi.dsi.fastutil.ints.IntArrays;
+
+import java.util.NoSuchElementException;
+
+/** A type-specific heap-based indirect priority queue. 
+ *
+ * <P>Instances of this class use an additional <em>inversion array</em>, of the same length as the reference array,
+ * to keep track of the heap position containing a given element of the reference array. The priority queue is
+ * represented using a heap. The heap is enlarged as needed, but it is never
+ * shrunk. Use the {@link #trim()} method to reduce its size, if necessary.
+ *
+ * <P>This implementation does <em>not</em> allow the same index to be enqueued several times.
+ */
+
+public class HEAP_INDIRECT_PRIORITY_QUEUE KEY_GENERIC extends HEAP_SEMI_INDIRECT_PRIORITY_QUEUE KEY_GENERIC {
+
+	/** The inversion array. */
+	protected int inv[];
+
+	/** Creates a new empty queue with a given capacity and comparator.
+	 *
+	 * @param refArray the reference array.
+	 * @param capacity the initial capacity of this queue.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity, KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		super( refArray, capacity, c );
+		if ( capacity > 0 ) this.heap = new int[ capacity ];
+
+		this.refArray = refArray;
+		this.c = c;
+
+		this.inv = new int[ refArray.length ];
+		IntArrays.fill( inv, -1 );
+	}
+
+	/** Creates a new empty queue with a given capacity and using the natural order.
+	 *
+	 * @param refArray the reference array.
+	 * @param capacity the initial capacity of this queue.
+	 */
+	public HEAP_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity ) {
+		this( refArray, capacity, null );
+	}
+
+	/** Creates a new empty queue with capacity equal to the length of the reference array and a given comparator.
+	 *
+	 * @param refArray the reference array.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( refArray, refArray.length, c );
+	}
+
+	/** Creates a new empty queue with capacity equal to the length of the reference array and using the natural order. 
+	 * @param refArray the reference array.
+	 */
+	public HEAP_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray ) {
+		this( refArray, refArray.length, null );
+	}
+
+
+	/** Wraps a given array in a queue using a given comparator.
+	 *
+	 * <P>The queue created by this constructor will be backed by the given array.
+	 * The first <code>size</code> elements of the array will be rearranged so as to form a heap (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param size the number of elements to be included in the queue.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, final int size, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( refArray, 0, c );
+		this.heap = a;
+		this.size = size;
+		int i = size;
+		while( i-- != 0 ) {
+			if ( inv[ a[ i ] ] != -1 ) throw new IllegalArgumentException( "Index " + a[ i ] + " appears twice in the heap" );
+			inv[ a[ i ] ] = i;
+		}
+		INDIRECT_HEAPS.makeHeap( refArray, a, inv, size, c );
+	}
+
+
+	/** Wraps a given array in a queue using a given comparator.
+	 *
+	 * <P>The queue created by this constructor will be backed by the given array.
+	 * The elements of the array will be rearranged so as to form a heap (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( refArray, a, a.length, c );
+	}
+
+	/** Wraps a given array in a queue using the natural order.
+	 *
+	 * <P>The queue created by this constructor will be backed by the given array.
+	 * The first <code>size</code> elements of the array will be rearranged so as to form a heap (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param size the number of elements to be included in the queue.
+	 */
+	public HEAP_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size ) {
+		this( refArray, a, size, null );
+	}
+
+
+	/** Wraps a given array in a queue using the natural order.
+	 *
+	 * <P>The queue created by this constructor will be backed by the given array.
+	 * The elements of the array will be rearranged so as to form a heap (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 */
+	public HEAP_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a ) {
+		this( refArray, a, a.length );
+	}
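A minimal sketch of the wrapping constructors above, assuming the generated int specialization it.unimi.dsi.fastutil.ints.IntHeapIndirectPriorityQueue (the example class name is invented): the given index array becomes the backing heap and is rearranged in place, which is cheaper than enqueueing its entries one by one.

    import it.unimi.dsi.fastutil.ints.IntHeapIndirectPriorityQueue;

    public class WrapSketch {
        public static void main( String[] args ) {
            final int[] refArray = { 7, 2, 9, 5 };
            // Indices into refArray; the constructor rearranges this array in place into a heap.
            final int[] indices = { 2, 0, 3 };
            final IntHeapIndirectPriorityQueue q = new IntHeapIndirectPriorityQueue( refArray, indices );
            System.out.println( q.first() );   // 3: refArray[ 3 ] == 5 is the smallest key among indices 2, 0 and 3
            System.out.println( q.dequeue() ); // 3; afterwards index 0 (key 7) becomes the minimum
            System.out.println( q.first() );   // 0
        }
    }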
+	
+	@SuppressWarnings("unchecked")
+	public void enqueue( final int x ) {
+		if ( inv[ x ] >= 0 ) throw new IllegalArgumentException( "Index " + x + " belongs to the queue" );
+
+		if ( size == heap.length ) heap = IntArrays.grow( heap, size + 1 );
+
+		inv[ heap[ size ] = x ] = size++;
+
+		INDIRECT_HEAPS.upHeap( refArray, heap, inv, size, size - 1, c );
+	}
+
+	public boolean contains( final int index ) {
+		return inv[ index ] >= 0;
+	}
+
+	public int dequeue() {
+		if ( size == 0 ) throw new NoSuchElementException();
+		final int result = heap[ 0 ];
+		if ( --size != 0 ) inv[ heap[ 0 ] = heap[ size ] ] = 0;
+		inv[ result ] = -1;
+
+		if ( size != 0 ) INDIRECT_HEAPS.downHeap( refArray, heap, inv, size, 0, c );
+		return result;
+	}
+
+	public void changed() {
+		INDIRECT_HEAPS.downHeap( refArray, heap, inv, size, 0, c );
+	}
+
+	public void changed( final int index ) {
+		final int pos = inv[ index ];
+		if ( pos < 0 ) throw new IllegalArgumentException( "Index " + index + " does not belong to the queue" );
+		final int newPos = INDIRECT_HEAPS.upHeap( refArray, heap, inv, size, pos, c );
+		INDIRECT_HEAPS.downHeap( refArray, heap, inv, size, newPos, c );
+	}
+
+	/** Rebuilds this heap in a bottom-up fashion.
+	 */
+
+	public void allChanged() {
+		INDIRECT_HEAPS.makeHeap( refArray, heap, inv, size, c );
+	}
+
+
+	public boolean remove( final int index ) {
+		final int result = inv[ index ];
+		if ( result < 0 ) return false;
+		inv[ index ] = -1;
+
+		if ( result < --size ) {
+			inv[ heap[ result ] = heap[ size ] ] = result;
+			final int newPos = INDIRECT_HEAPS.upHeap( refArray, heap, inv, size, result, c );
+			INDIRECT_HEAPS.downHeap( refArray, heap, inv, size, newPos, c );
+		}
+		
+		return true;
+	}
+
+
+	public void clear() {
+		size = 0;
+		IntArrays.fill( inv, -1 );
+	}
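The inversion array is what keeps contains(), remove(int) and the duplicate-index check above cheap: each starts from a single array lookup, followed by at most one up/down-heap pass. A minimal sketch of that contract, again assuming the generated int specialization (the class name is invented):

    import it.unimi.dsi.fastutil.ints.IntHeapIndirectPriorityQueue;

    public class IndexContractSketch {
        public static void main( String[] args ) {
            final int[] refArray = { 7, 2, 9 };
            final IntHeapIndirectPriorityQueue q = new IntHeapIndirectPriorityQueue( refArray );
            q.enqueue( 0 );
            q.enqueue( 2 );

            System.out.println( q.contains( 0 ) ); // true: looked up through the inversion array
            System.out.println( q.contains( 1 ) ); // false: index 1 was never enqueued

            try {
                q.enqueue( 0 );                    // the same index cannot be enqueued twice
            } catch ( IllegalArgumentException e ) {
                System.out.println( "duplicate index rejected" );
            }

            System.out.println( q.remove( 1 ) );   // false: index 1 is not in the queue
            System.out.println( q.remove( 0 ) );   // true: only index 2 is left
            System.out.println( q.first() );       // 2
        }
    }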
+
+
+#ifdef TEST
+
+	/** The original class, now just used for testing. */
+
+
+	private static class TestQueue {
+
+		/** The reference array */
+		private KEY_TYPE refArray[];
+		/** Its length */
+		private int N;
+		/** The number of elements in the heaps */
+		private int n;
+		/** The two comparators */
+		private KEY_COMPARATOR primaryComp, secondaryComp;
+		/** Two indirect heaps are used, called <code>primary</code> and <code>secondary</code>. Each of them contains
+			<code>n</code> of the indices 0, 1, ..., <code>N</code>-1, arranged so that the corresponding
+			objects are heap-ordered with respect to the respective comparator.
+			We also need an array <code>inSec[]</code> such that <code>inSec[k]</code> is the position in <code>secondary</code>
+			containing <code>k</code>.
+		*/
+		private int primary[], secondary[], inSec[];
+
+		/** Builds a double indirect priority queue.
+		 *  @param refArray The reference array.
+		 *  @param primaryComp The primary comparator.
+		 *  @param secondaryComp The secondary comparator.
+		 */
+		public TestQueue( KEY_TYPE refArray[], KEY_COMPARATOR primaryComp, KEY_COMPARATOR secondaryComp ) {
+			this.refArray = refArray;
+			this.N = refArray.length;
+			assert this.N != 0;
+			this.n = 0;
+			this.primaryComp = primaryComp;
+			this.secondaryComp = secondaryComp;
+			this.primary = new int[N];
+			this.secondary = new int[N];
+			this.inSec = new int[N];
+			java.util.Arrays.fill( inSec, -1 );
+		}
+
+		/** Adds an index to the queue. Notice that the index must not already be present in the queue.
+		 *  @param i The index to be added
+		 */
+		public void add( int i ) {
+			if ( i < 0 || i >= refArray.length ) throw new IndexOutOfBoundsException();
+			if ( inSec[ i ] >= 0 ) throw new IllegalArgumentException();
+			primary[n] = i;
+			secondary[n] = i; inSec[i] = n;
+			n++;
+			swimPrimary( n-1 );
+			swimSecondary( n-1 );
+		}
+
+		/** Heapify the primary heap.
+		 *  @param i The heap position from which to heapify.
+		 */
+		private void heapifyPrimary( int i ) {
+			int dep = primary[i];
+			int child;
+
+			while ( ( child = 2*i+1 ) < n ) {
+				if ( child+1 < n && primaryComp.compare( refArray[primary[child+1]], refArray[primary[child]] ) < 0 ) child++;
+				if ( primaryComp.compare( refArray[dep], refArray[primary[child]] ) <= 0 ) break;
+				primary[i] = primary[child];
+				i = child;
+			}
+			primary[i] = dep;
+		}
+
+		/** Heapify the secondary heap.
+		 *  @param i The heap position from which to heapify.
+		 */
+		private void heapifySecondary( int i ) {
+			int dep = secondary[i];
+			int child;
+
+			while ( ( child = 2*i+1 ) < n ) {
+				if ( child+1 < n && secondaryComp.compare( refArray[secondary[child+1]], refArray[secondary[child]] ) < 0 ) child++;
+				if ( secondaryComp.compare( refArray[dep], refArray[secondary[child]] ) <= 0 ) break;
+				secondary[i] = secondary[child]; inSec[secondary[i]] = i;
+				i = child;
+			}
+			secondary[i] = dep; inSec[secondary[i]] = i;
+		}
+
+		/** Swim and heapify the primary heap.
+		 *  @param i The index to be moved.
+		 */
+		private void swimPrimary( int i ) {
+			int dep = primary[i];
+			int parent;
+
+			while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) {
+				if ( primaryComp.compare( refArray[primary[parent]], refArray[dep] ) <= 0 ) break;
+				primary[i] = primary[parent];
+				i = parent;
+			}
+			primary[i] = dep;
+			heapifyPrimary( i );
+		}
+
+		/** Swim and heapify the secondary heap.
+		 *  @param i The index to be moved.
+		 */
+		private void swimSecondary( int i ) {
+			int dep = secondary[i];
+			int parent;
+
+			while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) {
+				if ( secondaryComp.compare( refArray[secondary[parent]], refArray[dep] ) <= 0 ) break;
+				secondary[i] = secondary[parent]; inSec[secondary[i]] = i;
+				i = parent;
+			}
+			secondary[i] = dep; inSec[secondary[i]] = i;
+			heapifySecondary( i );
+		}
+
+		/** Returns the minimum element with respect to the primary comparator.
+			@return the minimum element.
+		*/
+		public int top() {
+			if ( n == 0 ) throw new java.util.NoSuchElementException();
+			return primary[0];
+		}
+
+		/** Returns the minimum element with respect to the secondary comparator.
+			@return the minimum element.
+		*/
+		public int secTop() {
+			if ( n == 0 ) throw new java.util.NoSuchElementException();
+			return secondary[0];
+		}
+
+		/** Removes the minimum element with respect to the primary comparator.
+		 *  @return true if an element was actually removed.
+		 */
+		public boolean remove() {
+			if ( n == 0 ) throw new java.util.NoSuchElementException();
+			if ( inSec[primary[0]] == -1 ) return false;
+			int result = primary[0];
+			int ins = inSec[result];
+			inSec[ result ] = -1;
+			// Copy a leaf 
+			primary[0] = primary[n-1];
+			if ( ins == n-1 ) {
+				n--;
+				heapifyPrimary( 0 );	
+				return true;
+			}
+			secondary[ins] = secondary[n-1]; 
+			inSec[secondary[ins]] = ins;
+			// Heapify
+			n--;
+			heapifyPrimary( 0 );
+			swimSecondary( ins );
+			return true;
+		}
+
+		public void clear() {
+			while( size() != 0 ) remove();
+		}
+
+		public void remove( int index ) {
+			if ( index >= refArray.length ) throw new IndexOutOfBoundsException();
+			if ( inSec[index] == -1 ) return;
+			int ins = inSec[index];
+			inSec[ index ] = -1;
+			// Copy a leaf 
+			primary[ins] = primary[n-1];
+			if ( ins == n-1 ) {
+				n--;
+				swimPrimary( ins );	
+				return;
+			}
+			secondary[ins] = secondary[n-1]; 
+			inSec[secondary[ins]] = ins;
+			// Heapify
+			n--;
+			swimPrimary( ins );
+			swimSecondary( ins );
+		}
+
+		/** Signals that the minimum element with respect to the comparator has changed.
+		 */
+		public void change() {
+			if ( n == 0 ) throw new java.util.NoSuchElementException();
+			if ( inSec[primary[0]] == -1 ) throw new IllegalArgumentException();
+			int ins = inSec[primary[0]];
+			heapifyPrimary( 0 );
+			swimSecondary( ins );
+		}
+
+		public void change(int index) {
+			if ( index >= refArray.length ) throw new IndexOutOfBoundsException();
+			if ( inSec[index] == -1 ) throw new IllegalArgumentException();
+			if ( n == 0 ) throw new java.util.NoSuchElementException();
+			int ins = inSec[ index ];
+			swimPrimary( ins );
+			swimSecondary( ins );
+		}
+
+		/** Returns the number of elements in the queue.
+		 *  @return the size of the queue
+		 */
+		public int size() {
+			return n;
+		}
+
+
+
+		public String toString() {
+			String s = "[";
+			for ( int i = 0; i < n; i++ )
+				s += refArray[primary[i]]+", ";
+			return s+ "]";
+		}
+	}
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif #keyclass(Object)
+		return Integer.toBinaryString( r.nextInt() );
+#else 
+		return new java.io.Serializable() {};
+#endif
+	}
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition p = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, p ).toString();
+	}
+
+	private static void speedTest( int n, boolean comp ) {
+		System.out.println( "There are presently no speed tests for this class." );
+	}
+
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	private static boolean heapEqual( int[] a, int[] b, int sizea, int sizeb ) {
+		if ( sizea != sizeb ) return false;
+		while( sizea-- != 0 ) if ( a[sizea] != b[sizea] ) return false;
+		return true;
+	}
+
+	private static boolean invEqual( int inva[], int[] invb ) {
+		int i = inva.length;
+		while( i-- != 0 ) if ( inva[ i ] != invb[ i ] ) return false;
+		return true;
+	}
+
+	protected static void test( int n ) {
+		long ms;
+		Exception mThrowsIllegal, tThrowsIllegal, mThrowsOutOfBounds, tThrowsOutOfBounds, mThrowsNoElement, tThrowsNoElement;
+		int rm = 0, rt = 0;
+		KEY_TYPE[] refArray = new KEY_TYPE[ n ];
+
+		for( int i = 0; i < n; i++ ) refArray[ i ] = genKey();
+		  
+		HEAP_INDIRECT_PRIORITY_QUEUE m = new HEAP_INDIRECT_PRIORITY_QUEUE( refArray,  COMPARATORS.NATURAL_COMPARATOR );
+		TestQueue t = new TestQueue( refArray, COMPARATORS.NATURAL_COMPARATOR, COMPARATORS.NATURAL_COMPARATOR );
+
+		/* We add pairs to t. */
+		for( int i = 0; i < n / 2;  i++ ) {
+			t.add( i );
+			m.enqueue( i );
+		}
+
+		ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after creation (" + m + ", " + t + ")" );
+		ensure( invEqual( m.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after creation (" + java.util.Arrays.toString( m.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" );
+
+		/* Now we add and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<2*n;  i++ ) {
+			if ( r.nextDouble() < 0.01 ) {
+				t.clear();
+				m.clear();
+				for( int j = 0; j < n / 2;  j++ ) {
+					t.add( j );
+					m.enqueue( j );
+				}
+			}
+
+			int T = r.nextInt( 2 * n );
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			try {
+				m.enqueue( T );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = e; }
+
+			try {
+				t.add( T );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): enqueue() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): enqueue() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+
+			ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after enqueue (" + m + ", " + t + ")" );
+			ensure( invEqual( m.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after enqueue (" + java.util.Arrays.toString( m.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" );
+			
+			if ( m.size() != 0 ) {
+				ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after enqueue (" + m.first() + ", " + t.top() + ")");
+			}
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			try {
+				rm = m.dequeue();
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = e; }
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = e; }
+
+			try {
+				rt = t.top();
+				t.remove();
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = e; }
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): dequeue() divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): dequeue() divergence in IllegalArgumentException  (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ), "Error (" + seed + "): dequeue() divergence in java.util.NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( rt == rm , "Error (" + seed + "): divergence in dequeue() between t and m (" + rt + ", " + rm + ")" );
+
+			ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after dequeue (" + m + ", " + t + ")" );
+			ensure( invEqual( m.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after dequeue (" + java.util.Arrays.toString( m.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" );
+			
+			if ( m.size() != 0 ) {
+				ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after dequeue (" + m.first() + ", " + t.top() + ")");
+			}
+
+
+			int pos = r.nextInt( n * 2 );
+
+			try {
+				m.remove( pos );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = e; }
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = e; }
+
+			try {
+				t.remove( pos );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = e; }
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): remove(int) divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): remove(int) divergence in IllegalArgumentException  (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ), "Error (" + seed + "): remove(int) divergence in java.util.NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( rt == rm , "Error (" + seed + "): divergence in remove(int) between t and m (" + rt + ", " + rm + ")" );
+
+			ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after remove(int) (" + m + ", " + t + ")" );
+			ensure( invEqual( m.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after remove(int) (" + java.util.Arrays.toString( m.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" );
+
+			if ( m.size() != 0 ) {
+				ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after remove(int) (" + m.first() + ", " + t.top() + ")");
+			}
+
+
+			pos = r.nextInt( n * 2 );
+
+			try {
+				m.changed( pos );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = e; }
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = e; }
+
+			try {
+				t.change( pos );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = e; }
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): change(int) divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): change(int) divergence in IllegalArgumentException  (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ), "Error (" + seed + "): change(int) divergence in java.util.NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( rt == rm , "Error (" + seed + "): divergence in change(int) between t and m (" + rt + ", " + rm + ")" );
+
+			ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after change(int) (" + m + ", " + t + ")" );
+			ensure( invEqual( m.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after change(int) (" + java.util.Arrays.toString( m.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" );
+
+			if ( m.size() != 0 ) {
+				ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after change(int) (" + m.first() + ", " + t.top() + ")");
+			}
+
+			if ( m.size() != 0 ) {
+
+				refArray[ m.first() ] = genKey();
+
+				m.changed();
+				t.change();
+				
+				ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after change (" + m + ", " + t + ")" );
+				ensure( invEqual( m.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after change (" + java.util.Arrays.toString( m.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" );
+				
+				if ( m.size() != 0 ) {
+					ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after change (" + m.first() + ", " + t.top() + ")");
+				}
+			}
+		}
+
+
+		/* Now we check that m actually holds the same data. */
+		  
+		m.clear();
+		ensure( m.isEmpty(), "Error (" + seed + "): m is not empty after clear()" );
+
+		System.out.println("Test OK");
+	}
+
+
+
+	public static void main( String args[] ) {
+		int n  = Integer.parseInt(args[1]);
+		if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) );
+		  
+
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+
+#endif
+
+}
diff --git a/drv/HeapPriorityQueue.drv b/drv/HeapPriorityQueue.drv
new file mode 100644
index 0000000..61c9bdf
--- /dev/null
+++ b/drv/HeapPriorityQueue.drv
@@ -0,0 +1,386 @@
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+#if #keyclass(Object)
+import java.util.Comparator;
+
+import it.unimi.dsi.fastutil.AbstractPriorityQueue;
+#endif
+
+import java.util.NoSuchElementException;
+
+
+/** A type-specific heap-based priority queue.
+ *
+ * <P>Instances of this class represent a priority queue using a heap. The heap is enlarged as needed, but
+ * it is never shrunk. Use the {@link #trim()} method to reduce its size, if necessary.
+ */
+
+public class HEAP_PRIORITY_QUEUE KEY_GENERIC extends ABSTRACT_PRIORITY_QUEUE KEY_GENERIC {
+
+	/** The heap array. */
+	@SuppressWarnings("unchecked")
+	protected KEY_GENERIC_TYPE[] heap = KEY_GENERIC_ARRAY_CAST ARRAYS.EMPTY_ARRAY;
+
+	/** The number of elements in this queue. */
+	protected int size;
+	
+	/** The type-specific comparator used in this queue. */
+	protected KEY_COMPARATOR KEY_SUPER_GENERIC c;
+
+	/** Creates a new empty queue with a given capacity and comparator.
+	 *
+	 * @param capacity the initial capacity of this queue.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	@SuppressWarnings("unchecked")
+	public HEAP_PRIORITY_QUEUE( int capacity, KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		if ( capacity > 0 ) this.heap = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ capacity ];
+		this.c = c;
+	}
+
+	/** Creates a new empty queue with a given capacity and using the natural order.
+	 *
+	 * @param capacity the initial capacity of this queue.
+	 */
+	public HEAP_PRIORITY_QUEUE( int capacity ) {
+		this( capacity, null );
+	}
+
+	/** Creates a new empty queue with a given comparator.
+	 *
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_PRIORITY_QUEUE( KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( 0, c );
+	}
+
+	/** Creates a new empty queue using the natural order. 
+	 */
+	public HEAP_PRIORITY_QUEUE() {
+		this( 0, null );
+	}
+
+	/** Wraps a given array in a queue using a given comparator.
+	 *
+	 * <P>The queue created by this constructor will be backed by the given array.
+	 * The first <code>size</code> elements of the array will be rearranged so as to form a heap (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * @param a an array.
+	 * @param size the number of elements to be included in the queue.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] a, int size, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( c );
+		this.heap = a;
+		this.size = size;
+		HEAPS.makeHeap( a, size, c );
+	}
+
+
+	/** Wraps a given array in a queue using a given comparator.
+	 *
+	 * <P>The queue created by this constructor will be backed by the given array.
+	 * The elements of the array will be rearranged so as to form a heap (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * @param a an array.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] a, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( a, a.length, c );
+	}
+
+	/** Wraps a given array in a queue using the natural order.
+	 *
+	 * <P>The queue created by this constructor will be backed by the given array.
+	 * The first <code>size</code> elements of the array will be rearranged so as to form a heap (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * @param a an array.
+	 * @param size the number of elements to be included in the queue.
+	 */
+	public HEAP_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] a, int size ) {
+		this( a, size, null );
+	}
+
+
+	/** Wraps a given array in a queue using the natural order.
+	 *
+	 * <P>The queue created by this constructor will be backed by the given array.
+	 * The elements of the array will be rearranged so as to form a heap (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * @param a an array.
+	 */
+	public HEAP_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] a ) {
+		this( a, a.length );
+	}
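Unlike the indirect queues earlier in this diff, this class stores the keys themselves. A minimal usage sketch, assuming the generated int specialization it.unimi.dsi.fastutil.ints.IntHeapPriorityQueue with its type-specific accessors firstInt() and dequeueInt(); the example class name is invented:

    import it.unimi.dsi.fastutil.ints.IntHeapPriorityQueue;

    public class HeapQueueSketch {
        public static void main( String[] args ) {
            final IntHeapPriorityQueue q = new IntHeapPriorityQueue();
            q.enqueue( 4 );
            q.enqueue( 1 );
            q.enqueue( 3 );
            System.out.println( q.firstInt() );   // 1
            System.out.println( q.dequeueInt() ); // 1
            System.out.println( q.dequeueInt() ); // 3

            // Wrapping an existing array heapifies it in place, which is cheaper
            // than enqueueing its elements one by one.
            final int[] a = { 9, 2, 7, 5 };
            final IntHeapPriorityQueue w = new IntHeapPriorityQueue( a );
            System.out.println( w.dequeueInt() ); // 2
        }
    }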
+
+	@SuppressWarnings("unchecked")
+	public void enqueue( KEY_GENERIC_TYPE x ) {
+		if ( size == heap.length ) heap = ARRAYS.grow( heap, size + 1 );
+
+		heap[ size++ ] = x;
+		HEAPS.upHeap( heap, size, size - 1, c );
+	}
+
+	public KEY_GENERIC_TYPE DEQUEUE() {
+		if ( size == 0 ) throw new NoSuchElementException();
+
+		final KEY_GENERIC_TYPE result = heap[ 0 ];
+		heap[ 0 ] = heap[ --size ];
+#if #keyclass(Object)
+		heap[ size ] = null;
+#endif
+		if ( size != 0 ) HEAPS.downHeap( heap, size, 0, c );
+		return result;
+	}
+
+	public KEY_GENERIC_TYPE FIRST() {
+		if ( size == 0 ) throw new NoSuchElementException();
+		return heap[ 0 ];
+	}
+
+	public void changed() {
+		HEAPS.downHeap( heap, size, 0, c );
+	}
+
+	public int size() { return size; }
+
+	public void clear() { 
+#if #keyclass(Object)
+		ObjectArrays.fill( heap, 0, size, null );
+#endif
+		size = 0; 
+	}
+
+	/** Trims the underlying heap array so that it has exactly {@link #size()} elements.
+	 */
+
+	public void trim() {
+		heap = ARRAYS.trim( heap, size );
+	}
+
+	public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return c; }
+
+
+#ifdef TEST
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif #keyclass(Object)
+		return Integer.toBinaryString( r.nextInt() );
+#else 
+		return new java.io.Serializable() {};
+#endif
+	}
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition p = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, p ).toString();
+	}
+
+	private static void speedTest( int n, boolean comp ) {
+		System.out.println( "There are presently no speed tests for this class." );
+	}
+
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	private static boolean heapEqual( KEY_TYPE[] a, KEY_TYPE[] b, int sizea, int sizeb ) {
+		if ( sizea != sizeb ) return false;
+		KEY_TYPE[] aa = (KEY_TYPE[])a.clone();
+		KEY_TYPE[] bb = (KEY_TYPE[])b.clone();
+		java.util.Arrays.sort( aa, 0, sizea );
+		java.util.Arrays.sort( bb, 0, sizeb );
+		while( sizea-- != 0 ) if ( ! KEY_EQUALS( aa[sizea], bb[sizea] ) ) return false;
+		return true;
+	}
+
+	private static KEY_TYPE k[];
+
+	protected static void test( int n ) {
+		long ms;
+		Exception mThrowsIllegal, tThrowsIllegal, mThrowsOutOfBounds, tThrowsOutOfBounds, mThrowsNoElement, tThrowsNoElement;
+		KEY_TYPE rm = KEY_NULL, rt = KEY_NULL;
+		k = new KEY_TYPE[ n ];
+
+		for( int i = 0; i < n; i++ ) k[i] = genKey();
+
+		HEAP_PRIORITY_QUEUE m = new HEAP_PRIORITY_QUEUE( COMPARATORS.NATURAL_COMPARATOR );
+		ARRAY_PRIORITY_QUEUE t = new ARRAY_PRIORITY_QUEUE( COMPARATORS.NATURAL_COMPARATOR );
+
+		/* We add pairs to t. */
+		for( int i = 0; i < n / 2;  i++ ) {
+			t.enqueue( k[ i ] );
+			m.enqueue( k[ i ] );
+		}
+
+		ensure( heapEqual( m.heap, t.array, m.size(), t.size() ), "Error (" + seed + "): m and t differ after creation (" + m + ", " + t + ")" );
+
+		if ( m.size() != 0 ) {
+			ensure( KEY_EQUALS(m.FIRST(), t.FIRST()), "Error (" + seed + "): m and t differ in first element after creation (" + m.FIRST() + ", " + t.FIRST() + ")");
+		}
+
+		/* Now we add and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<2*n;  i++ ) {
+
+			if ( r.nextDouble() < 0.01 ) {
+				t.clear();
+				m.clear();
+				for( int j = 0; j < n / 2;  j++ ) {
+					t.enqueue( k[ j ] );
+					m.enqueue( k[ j ] );
+				}
+			}
+
+			KEY_TYPE T = genKey();
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			try {
+				m.enqueue( T );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = e; }
+
+			try {
+				t.enqueue( T );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): enqueue() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): enqueue() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+
+			ensure( heapEqual( m.heap, t.array, m.size(), t.size() ), "Error (" + seed + "): m and t differ after enqueue (" + m + ", " + t + ")" );
+			
+			if ( m.size() != 0 ) {
+				ensure( KEY_EQUALS(m.FIRST(), t.FIRST()), "Error (" + seed + "): m and t differ in first element after enqueue (" + m.FIRST() + ", " + t.FIRST() + ")");
+			}
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			try {
+				rm = m.DEQUEUE();
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = e; }
+			catch ( NoSuchElementException e ) { mThrowsNoElement = e; }
+
+			try {
+				rt = t.DEQUEUE();
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = e; }
+			catch ( NoSuchElementException e ) { tThrowsNoElement = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): dequeue() divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): dequeue() divergence in IllegalArgumentException  (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ), "Error (" + seed + "): dequeue() divergence in NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( rt == rm , "Error (" + seed + "): divergence in dequeue() between t and m (" + rt + ", " + rm + ")" );
+
+
+			ensure( heapEqual( m.heap, t.array, m.size(), t.size() ), "Error (" + seed + "): m and t differ after dequeue (" + m + ", " + t + ")");
+			
+			if ( m.size() != 0 ) {
+				ensure( KEY_EQUALS(m.FIRST(), t.FIRST()), "Error (" + seed + "): m and t differ in first element after dequeue (" + m.FIRST() + ", " + t.FIRST() + ")");
+			}
+
+			HEAP_PRIORITY_QUEUE m2 = new HEAP_PRIORITY_QUEUE( t.array, t.size() );
+			ARRAY_PRIORITY_QUEUE t2 = new ARRAY_PRIORITY_QUEUE( m.heap, m.size() );
+			m = m2;
+			t = t2;
+
+			ensure( heapEqual( m.heap, t.array, m.size(), t.size() ), "Error (" + seed + "): m and t differ after wrap (" + m + ", " + t + ")");
+			
+			if ( m.size() != 0 ) {
+				ensure( KEY_EQUALS(m.FIRST(), t.FIRST()), "Error (" + seed + "): m and t differ in first element after wrap (" + m.FIRST() + ", " + t.FIRST() + ")");
+			}
+
+			if ( m.size() != 0 && ( ( new OPEN_HASH_SET( m.heap, 0, m.size ) ).size() == m.size() ) ) {
+
+				int j = t.size(), M = --j;
+#if #keys(primitive)
+				while( j-- != 0 ) if ( KEY_LESS( t.array[ j ], t.array[ M ] ) ) M = j;
+#else
+				while( j-- != 0 ) if ( ((Comparable)t.array[ j ]).compareTo( t.array[ M ] )< 0 ) M = j;
+#endif
+
+				m.heap[ 0 ] = t.array[ M ] = genKey();
+
+				m.changed();
+				t.changed();
+				
+				ensure( heapEqual( m.heap, t.array, m.size(), t.size() ), "Error (" + seed + "): m and t differ after change (" + m + ", " + t + ")");
+				
+				if ( m.size() != 0 ) {
+					ensure( KEY_EQUALS(m.FIRST(), t.FIRST()), "Error (" + seed + "): m and t differ in first element after change (" + m.FIRST() + ", " + t.FIRST() + ")");
+				}
+			}
+		}
+
+
+		/* Finally, we check that clear() leaves m empty. */
+		  
+		m.clear();
+		ensure( m.isEmpty(), "Error (" + seed + "): m is not empty after clear()" );
+
+		System.out.println("Test OK");
+	}
+
+
+
+	public static void main( String args[] ) {
+		int n  = Integer.parseInt(args[1]);
+		if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) );
+		  
+
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
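+
+	/* Invocation sketch (illustrative, not part of the upstream harness): the
+	 * generated test class is run with args[0] selecting the mode ("test",
+	 * "speedTest" or "speedComp"), args[1] giving the number of elements, and an
+	 * optional args[2] fixing the random seed so that a failing run can be
+	 * reproduced, e.g.
+	 *
+	 *     java <generated test class> test 1000 42
+	 */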
+
+#endif
+
+}
diff --git a/drv/HeapSemiIndirectPriorityQueue.drv b/drv/HeapSemiIndirectPriorityQueue.drv
new file mode 100644
index 0000000..cc03acf
--- /dev/null
+++ b/drv/HeapSemiIndirectPriorityQueue.drv
@@ -0,0 +1,572 @@
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+#if #keyclass(Object)
+import java.util.Comparator;
+import it.unimi.dsi.fastutil.IndirectPriorityQueue;
+#endif
+
+import java.util.NoSuchElementException;
+
+import it.unimi.dsi.fastutil.ints.IntArrays;
+import it.unimi.dsi.fastutil.AbstractIndirectPriorityQueue;
+
+/** A type-specific heap-based semi-indirect priority queue. 
+ *
+ * <P>Instances of this class use as reference list a <em>reference array</em>,
+ * which must be provided to each constructor. The priority queue is
+ * represented using a heap. The heap is enlarged as needed, but it is never
+ * shrunk. Use the {@link #trim()} method to reduce its size, if necessary.
+ *
+ * <P>This implementation allows one to enqueue the same index several times, but
+ * you must be careful when calling {@link #changed()}.
+ */
+
+public class HEAP_SEMI_INDIRECT_PRIORITY_QUEUE KEY_GENERIC extends AbstractIndirectPriorityQueue<KEY_GENERIC_CLASS> implements INDIRECT_PRIORITY_QUEUE KEY_GENERIC {
+
+	/** The reference array. */
+	protected KEY_GENERIC_TYPE refArray[];
+
+	/** The semi-indirect heap. */
+	protected int heap[] = IntArrays.EMPTY_ARRAY;
+
+	/** The number of elements in this queue. */
+	protected int size;
+	
+	/** The type-specific comparator used in this queue. */
+	protected KEY_COMPARATOR KEY_SUPER_GENERIC c;
+
+	/** Creates a new empty queue with a given capacity and comparator.
+	 *
+	 * @param refArray the reference array.
+	 * @param capacity the initial capacity of this queue.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity, KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		if ( capacity > 0 ) this.heap = new int[ capacity ];
+		this.refArray = refArray;
+		this.c = c;
+	}
+
+	/** Creates a new empty queue with a given capacity and using the natural order.
+	 *
+	 * @param refArray the reference array.
+	 * @param capacity the initial capacity of this queue.
+	 */
+	public HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity ) {
+		this( refArray, capacity, null );
+	}
+
+	/** Creates a new empty queue with capacity equal to the length of the reference array and a given comparator.
+	 *
+	 * @param refArray the reference array.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( refArray, refArray.length, c );
+	}
+
+	/** Creates a new empty queue with capacity equal to the length of the reference array and using the natural order. 
+	 * @param refArray the reference array.
+	 */
+	public HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray ) {
+		this( refArray, refArray.length, null );
+	}
+
+
+	/** Wraps a given array in a queue using a given comparator.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 * The first <code>size</code> elements of the array will be rearranged so as to form a heap (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param size the number of elements to be included in the queue.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( refArray, 0, c );
+		this.heap = a;
+		this.size = size;
+		SEMI_INDIRECT_HEAPS.makeHeap( refArray, a, size, c );
+	}
+
+
+	/** Wraps a given array in a queue using a given comparator.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 * The elements of the array will be rearranged so as to form a heap (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param c the comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( refArray, a, a.length, c );
+	}
+
+	/** Wraps a given array in a queue using the natural order.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 * The first <code>size</code> elements of the array will be rearranged so as to form a heap (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param size the number of elements to be included in the queue.
+	 */
+	public HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size ) {
+		this( refArray, a, size, null );
+	}
+
+
+	/** Wraps a given array in a queue using the natural order.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 * The elements of the array will be rearranged so as to form a heap (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 */
+	public HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a ) {
+		this( refArray, a, a.length );
+	}
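+
+	/* Usage sketch (illustrative only, written with the macro names of this
+	 * template): the queue stores indices and orders them by the values found in
+	 * the reference array, so first()/dequeue() yield the index of the smallest
+	 * reference value.
+	 *
+	 *     KEY_TYPE[] ref = ...;                       // the reference array
+	 *     HEAP_SEMI_INDIRECT_PRIORITY_QUEUE q = new HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( ref );
+	 *     q.enqueue( 3 );                             // enqueue indices, not values
+	 *     q.enqueue( 0 );
+	 *     int smallest = q.first();                   // index of the smaller of ref[ 3 ] and ref[ 0 ]
+	 */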
+
+	/** Ensures that the given index is a valid reference.
+	 *
+	 * @param index an index in the reference array.
+	 * @throws IndexOutOfBoundsException if the given index is negative or not smaller than the reference array length.
+	 */
+	protected void ensureElement( final int index ) {
+		if ( index < 0 )  throw new IndexOutOfBoundsException( "Index (" + index + ") is negative" );
+		if ( index >= refArray.length ) throw new IndexOutOfBoundsException( "Index (" + index + ") is larger than or equal to reference array size (" + refArray.length + ")" );
+	}
+	
+	public void enqueue( int x ) {
+		ensureElement( x );
+
+		if ( size == heap.length ) heap = IntArrays.grow( heap, size + 1 );
+
+		heap[ size++ ] = x;
+		SEMI_INDIRECT_HEAPS.upHeap( refArray, heap, size, size - 1, c );
+	}
+
+	public int dequeue() {
+		if ( size == 0 ) throw new NoSuchElementException();
+		final int result = heap[ 0 ];
+		heap[ 0 ] = heap[ --size ];
+		if ( size != 0 ) SEMI_INDIRECT_HEAPS.downHeap( refArray, heap, size, 0, c );
+		return result;
+	}
+
+	public int first() {
+		if ( size == 0 ) throw new NoSuchElementException();
+		return heap[ 0 ];
+	}
+
+	/** {@inheritDoc}
+	 *
+	 * <P>The caller <strong>must</strong> guarantee that when this method is called the
+	 * index of the first element appears just once in the queue. Failure to do so
+	 * will bring the queue into an inconsistent state and cause
+	 * unpredictable behaviour.
+	 */
+
+	public void changed() {
+		SEMI_INDIRECT_HEAPS.downHeap( refArray, heap, size, 0, c );
+	}
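+
+	/* Typical use of changed() (sketch, not part of the original source): update
+	 * the reference value of the current front element, then notify the queue so
+	 * that the single occurrence of that index is sifted down again.
+	 *
+	 *     int i = q.first();
+	 *     refArray[ i ] = newValue;   // i must appear just once in the queue
+	 *     q.changed();
+	 */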
+
+	/** Rebuilds this heap in a bottom-up fashion.
+	 */
+
+	public void allChanged() {
+		SEMI_INDIRECT_HEAPS.makeHeap( refArray, heap, size, c );
+	}
+
+	public int size() { return size; }
+
+	public void clear() { size = 0; }
+
+	/** Trims the backing array so that it has exactly {@link #size()} elements.
+	 */
+
+	public void trim() {
+		heap = IntArrays.trim( heap, size );
+	}
+
+	public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return c; }
+
+	public int front( final int[] a ) {
+		return c == null ? SEMI_INDIRECT_HEAPS.front( refArray, heap, size, a ) : SEMI_INDIRECT_HEAPS.front( refArray, heap, size, a, c );
+	}
+
+	public String toString() {
+		StringBuffer s = new StringBuffer();
+		s.append( "[" );
+		for ( int i = 0; i < size; i++ ) {
+			if ( i != 0 ) s.append( ", " );
+			s.append( refArray[ heap [ i ] ] );
+		}
+		s.append( "]" );
+		return s.toString();
+	}
+
+#ifdef TEST
+
+	/** The original class, now just used for testing. */
+
+	private static class TestQueue {
+
+		/** The reference array */
+		private KEY_TYPE refArray[];
+		/** Its length */
+		private int N;
+		/** The number of elements in the heaps */
+		private int n;
+		/** The two comparators */
+		private KEY_COMPARATOR primaryComp, secondaryComp;
+		/** Two indirect heaps are used, called <code>primary</code> and <code>secondary</code>. Each of them contains
+			a permutation of <code>n</code> of the indices 0, 1, ..., <code>N</code>-1, arranged so that the corresponding
+			objects are ordered with respect to the two comparators.
+			We also need an array <code>inSec[]</code> so that <code>inSec[k]</code> is the index of <code>secondary</code>
+			containing <code>k</code>.
+		*/
+		private int primary[], secondary[], inSec[];
+
+		/** Builds a double indirect priority queue.
+		 *  @param refArray The reference array.
+		 *  @param primaryComp The primary comparator.
+		 *  @param secondaryComp The secondary comparator.
+		 */
+		public TestQueue( KEY_TYPE refArray[], KEY_COMPARATOR primaryComp, KEY_COMPARATOR secondaryComp ) {
+			this.refArray = refArray;
+			this.N = refArray.length;
+			assert this.N != 0;
+			this.n = 0;
+			this.primaryComp = primaryComp;
+			this.secondaryComp = secondaryComp;
+			this.primary = new int[N];
+			this.secondary = new int[N];
+			this.inSec = new int[N];
+			java.util.Arrays.fill( inSec, -1 );
+		}
+
+		/** Adds an index to the queue. Notice that the index should not already be present in the queue.
+		 *  @param i The index to be added
+		 */
+		public void add( int i ) {
+			if ( i < 0 || i >= refArray.length ) throw new IndexOutOfBoundsException();
+			//if ( inSec[ i ] >= 0 ) throw new IllegalArgumentException();
+			primary[n] = i;
+			n++;
+			swimPrimary( n-1 );
+		}
+
+		/** Heapify the primary heap.
+		 *  @param i The index of the heap to be heapified.
+		 */
+		private void heapifyPrimary( int i ) {
+			int dep = primary[i];
+			int child;
+
+			while ( ( child = 2*i+1 ) < n ) {
+				if ( child+1 < n && primaryComp.compare( refArray[primary[child+1]], refArray[primary[child]] ) < 0 ) child++;
+				if ( primaryComp.compare( refArray[dep], refArray[primary[child]] ) <= 0 ) break;
+				primary[i] = primary[child];
+				i = child;
+			}
+			primary[i] = dep;
+		}
+
+		/** Heapify the secondary heap.
+		 *  @param i The index of the heap to be heapified.
+		 */
+		private void heapifySecondary( int i ) {
+			int dep = secondary[i];
+			int child;
+
+			while ( ( child = 2*i+1 ) < n ) {
+				if ( child+1 < n && secondaryComp.compare( refArray[secondary[child+1]], refArray[secondary[child]] ) < 0 ) child++;
+				if ( secondaryComp.compare( refArray[dep], refArray[secondary[child]] ) <= 0 ) break;
+				secondary[i] = secondary[child]; inSec[secondary[i]] = i;
+				i = child;
+			}
+			secondary[i] = dep; inSec[secondary[i]] = i;
+		}
+
+		/** Swim and heapify the primary heap.
+		 *  @param i The index to be moved.
+		 */
+		private void swimPrimary( int i ) {
+			int dep = primary[i];
+			int parent;
+
+			while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) {
+				if ( primaryComp.compare( refArray[primary[parent]], refArray[dep] ) <= 0 ) break;
+				primary[i] = primary[parent];
+				i = parent;
+			}
+			primary[i] = dep;
+			heapifyPrimary( i );
+		}
+
+		/** Swim and heapify the secondary heap.
+		 *  @param i The index to be moved.
+		 */
+		private void swimSecondary( int i ) {
+			int dep = secondary[i];
+			int parent;
+
+			while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) {
+				if ( secondaryComp.compare( refArray[secondary[parent]], refArray[dep] ) <= 0 ) break;
+				secondary[i] = secondary[parent]; inSec[secondary[i]] = i;
+				i = parent;
+			}
+			secondary[i] = dep; inSec[secondary[i]] = i;
+			heapifySecondary( i );
+		}
+
+		/** Returns the minimum element with respect to the primary comparator.
+			@return the minimum element.
+		*/
+		public int top() {
+			if ( n == 0 ) throw new NoSuchElementException();
+			return primary[0];
+		}
+
+		/** Returns the minimum element with respect to the secondary comparator.
+			@return the minimum element.
+		*/
+		public int secTop() {
+			if ( n == 0 ) throw new NoSuchElementException();
+			return secondary[0];
+		}
+
+		/** Removes the minimum element with respect to the primary comparator.
+		 */
+		public void remove() {
+			if ( n == 0 ) throw new NoSuchElementException();
+			int result = primary[0];
+			// Copy a leaf 
+			primary[0] = primary[n-1];
+			n--;
+			heapifyPrimary( 0 );	
+			return;
+		}
+
+		public void clear() {
+			while( size() != 0 ) remove();
+		}
+
+		/** Signals that the minimum element with respect to the comparator has changed.
+		 */
+		public void change() {
+			heapifyPrimary( 0 );
+		}
+
+		/** Returns the number of elements in the queue.
+		 *  @return the size of the queue
+		 */
+		public int size() {
+			return n;
+		}
+
+		public String toString() {
+			String s = "[";
+			for ( int i = 0; i < n; i++ )
+				s += refArray[primary[i]]+", ";
+			return s+ "]";
+		}
+	}
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif #keyclass(Object)
+		return Integer.toBinaryString( r.nextInt() );
+#else 
+		return new java.io.Serializable() {};
+#endif
+	}
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition p = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, p ).toString();
+	}
+
+	private static void speedTest( int n, boolean comp ) {
+		System.out.println( "There are presently no speed tests for this class." );
+	}
+
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	private static boolean heapEqual( int[] a, int[] b, int sizea, int sizeb ) {
+		if ( sizea != sizeb ) return false;
+		while( sizea-- != 0 ) if ( a[sizea] != b[sizea] ) return false;
+		return true;
+	}
+
+	protected static void test( int n ) {
+		long ms;
+		Exception mThrowsIllegal, tThrowsIllegal, mThrowsOutOfBounds, tThrowsOutOfBounds, mThrowsNoElement, tThrowsNoElement;
+		int rm = 0, rt = 0;
+		KEY_TYPE[] refArray = new KEY_TYPE[ n ];
+
+		for( int i = 0; i < n; i++ ) refArray[ i ] = genKey();
+		  
+		HEAP_SEMI_INDIRECT_PRIORITY_QUEUE m = new HEAP_SEMI_INDIRECT_PRIORITY_QUEUE( refArray,  COMPARATORS.NATURAL_COMPARATOR );
+		TestQueue t = new TestQueue( refArray, COMPARATORS.NATURAL_COMPARATOR, COMPARATORS.OPPOSITE_COMPARATOR );
+
+		/* We add the first n/2 elements to both queues. */
+		for( int i = 0; i < n / 2;  i++ ) {
+			t.add( i );
+			m.enqueue( i );
+		}
+
+		ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after creation (" + m + ", " + t + ")" );
+
+		/* Now we add and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<2*n;  i++ ) {
+			if ( r.nextDouble() < 0.01 ) {
+				t.clear();
+				m.clear();
+				for( int j = 0; j < n / 2;  j++ ) {
+					t.add( j );
+					m.enqueue( j );
+				}
+			}
+
+			int T = r.nextInt( 2 * n );
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			try {
+				m.enqueue( T );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = e; }
+
+			try {
+				t.add( T );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): enqueue() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): enqueue() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+
+			ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after enqueue (" + m + ", " + t + ")" );
+			
+			if ( m.size() != 0 ) {
+				ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after enqueue (" + m.first() + ", " + t.top() + ")");
+			}
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			try {
+				rm = m.dequeue();
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = e; }
+			catch ( NoSuchElementException e ) { mThrowsNoElement = e; }
+
+			try {
+				rt = t.top();
+				t.remove();
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = e; }
+			catch ( NoSuchElementException e ) { tThrowsNoElement = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): dequeue() divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): dequeue() divergence in IllegalArgumentException  (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ), "Error (" + seed + "): dequeue() divergence in NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( rt == rm , "Error (" + seed + "): divergence in dequeue() between t and m (" + rt + ", " + rm + ")" );
+
+
+			ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after dequeue (" + m + ", " + t + ")");
+			
+			if ( m.size() != 0 ) {
+				ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after dequeue (" + m.first() + ", " + t.top() + ")");
+			}
+
+			if ( m.size() != 0 && ( ( new it.unimi.dsi.fastutil.ints.IntOpenHashSet( m.heap, 0, m.size ) ).size() == m.size() ) ) {
+
+				refArray[ m.first() ] = genKey();
+
+				m.changed();
+				t.change();
+				
+				ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ after change (" + m + ", " + t + ")");
+				
+				if ( m.size() != 0 ) {
+					ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after change (" + m.first() + ", " + t.top() + ")");
+				}
+			}
+		}
+
+
+		/* Finally, we check that clear() leaves m empty. */
+		  
+		m.clear();
+		ensure( m.isEmpty(), "Error (" + seed + "): m is not empty after clear()" );
+
+		System.out.println("Test OK");
+	}
+
+
+
+	public static void main( String args[] ) {
+		int n  = Integer.parseInt(args[1]);
+		if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) );
+		  
+
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+
+#endif
+}
diff --git a/drv/HeapSesquiIndirectDoublePriorityQueue.drv b/drv/HeapSesquiIndirectDoublePriorityQueue.drv
new file mode 100644
index 0000000..e398ce7
--- /dev/null
+++ b/drv/HeapSesquiIndirectDoublePriorityQueue.drv
@@ -0,0 +1,667 @@
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+#if #keyclass(Object)
+import java.util.Comparator;
+
+import it.unimi.dsi.fastutil.IndirectDoublePriorityQueue;
+#endif
+
+/** A type-specific heap-based sesqui-indirect double priority queue.
+ *
+ * <P>Instances of this class are based on a semi-indirect and an indirect
+ * heap-based queues. The queues are enlarged as needed, but they are never
+ * shrunk. Use the {@link #trim()} method to reduce their size, if necessary.
+ *
+ * <P>Either comparator may be <code>null</code>, indicating that the natural order should be used. Of course,
+ * it makes little sense to have them equal.
+ */
+
+public class HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE KEY_GENERIC extends HEAP_SEMI_INDIRECT_PRIORITY_QUEUE KEY_GENERIC implements INDIRECT_DOUBLE_PRIORITY_QUEUE KEY_GENERIC {
+
+	/** The secondary indirect queue. */
+	protected HEAP_INDIRECT_PRIORITY_QUEUE KEY_GENERIC secondaryQueue;
+
+	/** Creates a new empty queue with a given capacity.
+	 *
+	 * @param refArray the reference array.
+	 * @param capacity the initial capacity of this queue.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 * @param d the secondary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity, KEY_COMPARATOR KEY_SUPER_GENERIC c, KEY_COMPARATOR KEY_SUPER_GENERIC d ) {
+		super( refArray, capacity, c );
+		secondaryQueue = new HEAP_INDIRECT_PRIORITY_QUEUE KEY_GENERIC( refArray, capacity, d );
+	}
+
+
+	/** Creates a new empty queue with a given capacity.
+	 *
+	 * <P>This constructor uses as secondary comparator the opposite order of <code>c</code>.
+	 *
+	 * @param refArray the reference array.
+	 * @param capacity the initial capacity of this queue.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	@SuppressWarnings("unchecked")
+	public HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity, KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		super( refArray, capacity, c );
+		secondaryQueue = new HEAP_INDIRECT_PRIORITY_QUEUE KEY_GENERIC( refArray, capacity, c == null ? COMPARATORS.OPPOSITE_COMPARATOR : COMPARATORS.oppositeComparator( c ) );
+	}
+
+
+	/** Creates a new empty queue with a given capacity and natural order as primary comparator.
+	 *
+	 * <P>This constructor uses as secondary comparator the opposite of the natural order.
+	 *
+	 * @param refArray the reference array.
+	 * @param capacity the initial capacity of this queue.
+	 */
+	public HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, int capacity ) {
+		this( refArray, capacity, null );
+	}
+
+
+	/** Creates a new empty queue with capacity equal to the length of the reference array.
+	 *
+	 * @param refArray the reference array.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 * @param d the secondary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, KEY_COMPARATOR KEY_SUPER_GENERIC c, KEY_COMPARATOR KEY_SUPER_GENERIC d ) {
+		this( refArray, refArray.length, c, d );
+	}
+
+	/** Creates a new empty queue with capacity equal to the length of the reference array.
+	 *
+	 * <P>This constructor uses as secondary comparator the opposite order of <code>c</code>.
+	 *
+	 * @param refArray the reference array.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray, KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( refArray, refArray.length, c );
+	}
+
+	/** Creates a new empty queue with capacity equal to the length of the reference array and natural order as primary comparator.
+	 *
+	 * <P>This constructor uses as secondary comparator the opposite of the natural order.
+	 *
+	 * @param refArray the reference array.
+	 */
+	public HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE( KEY_GENERIC_TYPE[] refArray ) {
+		this( refArray, refArray.length, null );
+	}
+
+
+	/** Wraps a given array in a queue using the given comparators.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 * The first <code>size</code> elements of the array will be rearranged so as to form a heap, and
+	 * moreover the array will be cloned and wrapped in a secondary queue (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param size the number of elements to be included in the queue.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 * @param d the secondary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size, final KEY_COMPARATOR KEY_SUPER_GENERIC c, final KEY_COMPARATOR KEY_SUPER_GENERIC d ) {
+		super( refArray, a, size, c );
+		this.secondaryQueue = new HEAP_INDIRECT_PRIORITY_QUEUE KEY_GENERIC( refArray, a.clone(), size, d );
+	}
+
+	/** Wraps a given array in a queue using the given comparators.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 * The elements of the array will be rearranged so as to form a heap, and
+	 * moreover the array will be cloned and wrapped in a secondary queue (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 * @param d the secondary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, final KEY_COMPARATOR KEY_SUPER_GENERIC c, final KEY_COMPARATOR KEY_SUPER_GENERIC d ) {
+		this( refArray, a, a.length, c, d );
+	}
+
+
+	/** Wraps a given array in a queue using a given comparator.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 * The first <code>size</code> elements of the array will be rearranged so as to form a heap, and
+	 * moreover the array will be cloned and wrapped in a secondary queue (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * <P>This constructor uses as secondary comparator the opposite order of <code>c</code>.
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param size the number of elements to be included in the queue.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	@SuppressWarnings("unchecked")
+	public HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( refArray, a, size, c, c == null ? COMPARATORS.OPPOSITE_COMPARATOR : COMPARATORS.oppositeComparator( c ) );
+	}
+
+
+	/** Wraps a given array in a queue using a given comparator.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 * The elements of the array will be rearranged so as to form a heap, and
+	 * moreover the array will be cloned and wrapped in a secondary queue (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * <P>This constructor uses as secondary comparator the opposite order of <code>c</code>.
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param c the primary comparator used in this queue, or <code>null</code> for the natural order.
+	 */
+	public HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		this( refArray, a, a.length, c );
+	}
+
+	/** Wraps a given array in a queue using the natural order.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 * The first <code>size</code> elements of the array will be rearranged so as to form a heap, and
+	 * moreover the array will be cloned and wrapped in a secondary queue (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * <P>This constructor uses as secondary comparator the opposite of the natural order.
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 * @param size the number of elements to be included in the queue.
+	 */
+	public HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a, int size ) {
+		this( refArray, a, size, null );
+	}
+
+
+	/** Wraps a given array in a queue using the natural order.
+	 *
+	 * <P>The queue returned by this method will be backed by the given array.
+	 * The elements of the array will be rearranged so as to form a heap, and
+	 * moreover the array will be cloned and wrapped in a secondary queue (this is
+	 * more efficient than enqueueing the elements of <code>a</code> one by one).
+	 *
+	 * <P>This constructor uses as secondary comparator the opposite of the natural order.
+	 *
+	 * @param refArray the reference array.
+	 * @param a an array of indices into <code>refArray</code>.
+	 */
+	public HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE( final KEY_GENERIC_TYPE[] refArray, final int[] a ) {
+		this( refArray, a, a.length );
+	}
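+
+	/* Usage sketch (illustrative only): with the constructors that take no
+	 * secondary comparator, the primary order is the natural order and the
+	 * secondary order is its opposite, so first() yields the index of the
+	 * smallest reference value and secondaryFirst() the index of the largest.
+	 *
+	 *     HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE q = new HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE( ref );
+	 *     q.enqueue( 0 );
+	 *     q.enqueue( 1 );
+	 *     int min = q.first();            // index of the smaller of ref[ 0 ] and ref[ 1 ]
+	 *     int max = q.secondaryFirst();   // index of the larger one
+	 */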
+
+	public void enqueue( int x ) {
+		secondaryQueue.enqueue( x );
+		super.enqueue( x );
+	}
+
+	public int dequeue() {
+		final int result = super.dequeue();
+		secondaryQueue.remove( result );
+		return result;
+	}
+
+	public int secondaryFirst() {
+		return secondaryQueue.first();
+	}
+
+	public int secondaryLast() { throw new UnsupportedOperationException(); }
+
+	public int secondaryFront( final int[] a ) {
+		return secondaryQueue.front( a );
+	}
+
+	public void changed() {
+		secondaryQueue.changed( heap[ 0 ] );
+		super.changed();
+	}
+
+	public void allChanged() {
+		secondaryQueue.allChanged();
+		super.allChanged();
+	}
+
+	public void clear() { 
+		super.clear();
+		secondaryQueue.clear();
+	}
+
+	/** Trims the underlying queues so they have exactly {@link #size()} elements.
+	 */
+
+	public void trim() {
+		super.trim();
+		secondaryQueue.trim();
+	}
+
+	/** Returns the secondary comparator of this queue.
+	 *
+	 * @return the secondary comparator of this queue.
+	 * @see #secondaryFirst()
+	 */
+	public KEY_COMPARATOR KEY_SUPER_GENERIC secondaryComparator() { return secondaryQueue.comparator(); }
+
+#ifdef TEST
+
+	/** The original class, now just used for testing. */
+
+	private static class TestQueue {
+
+		/** The reference array */
+		private KEY_TYPE refArray[];
+		/** Its length */
+		private int N;
+		/** The number of elements in the heaps */
+		private int n;
+		/** The two comparators */
+		private KEY_COMPARATOR primaryComp, secondaryComp;
+		/** Two indirect heaps are used, called <code>primary</code> and <code>secondary</code>. Each of them contains
+			a permutation of <code>n</code> of the indices 0, 1, ..., <code>N</code>-1, arranged so that the corresponding
+			objects are ordered with respect to the two comparators.
+			We also need an array <code>inSec[]</code> so that <code>inSec[k]</code> is the index of <code>secondary</code>
+			containing <code>k</code>.
+		*/
+		private int primary[], secondary[], inSec[];
+
+		/** Builds a double indirect priority queue.
+		 *  @param refArray The reference array.
+		 *  @param primaryComp The primary comparator.
+		 *  @param secondaryComp The secondary comparator.
+		 */
+		public TestQueue( KEY_TYPE refArray[], KEY_COMPARATOR primaryComp, KEY_COMPARATOR secondaryComp ) {
+			this.refArray = refArray;
+			this.N = refArray.length;
+			assert this.N != 0;
+			this.n = 0;
+			this.primaryComp = primaryComp;
+			this.secondaryComp = secondaryComp;
+			this.primary = new int[N];
+			this.secondary = new int[N];
+			this.inSec = new int[N];
+			java.util.Arrays.fill( inSec, -1 );
+		}
+
+		/** Adds an index to the queue. Notice that the index should not already be present in the queue.
+		 *  @param i The index to be added
+		 */
+		public void add( int i ) {
+			if ( i < 0 || i >= refArray.length ) throw new IndexOutOfBoundsException();
+			if ( inSec[ i ] >= 0 ) throw new IllegalArgumentException();
+			primary[n] = i;
+			secondary[n] = i; inSec[i] = n;
+			n++;
+			swimPrimary( n-1 );
+			swimSecondary( n-1 );
+		}
+
+		/** Heapify the primary heap.
+		 *  @param i The index of the heap to be heapified.
+		 */
+		private void heapifyPrimary( int i ) {
+			int dep = primary[i];
+			int child;
+
+			while ( ( child = 2*i+1 ) < n ) {
+				if ( child+1 < n && primaryComp.compare( refArray[primary[child+1]], refArray[primary[child]] ) < 0 ) child++;
+				if ( primaryComp.compare( refArray[dep], refArray[primary[child]] ) <= 0 ) break;
+				primary[i] = primary[child];
+				i = child;
+			}
+			primary[i] = dep;
+		}
+
+		/** Heapify the secondary heap.
+		 *  @param i The index of the heap to be heapified.
+		 */
+		private void heapifySecondary( int i ) {
+			int dep = secondary[i];
+			int child;
+
+			while ( ( child = 2*i+1 ) < n ) {
+				if ( child+1 < n && secondaryComp.compare( refArray[secondary[child+1]], refArray[secondary[child]] ) < 0 ) child++;
+				if ( secondaryComp.compare( refArray[dep], refArray[secondary[child]] ) <= 0 ) break;
+				secondary[i] = secondary[child]; inSec[secondary[i]] = i;
+				i = child;
+			}
+			secondary[i] = dep; inSec[secondary[i]] = i;
+		}
+
+		/** Swim and heapify the primary heap.
+		 *  @param i The index to be moved.
+		 */
+		private void swimPrimary( int i ) {
+			int dep = primary[i];
+			int parent;
+
+			while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) {
+				if ( primaryComp.compare( refArray[primary[parent]], refArray[dep] ) <= 0 ) break;
+				primary[i] = primary[parent];
+				i = parent;
+			}
+			primary[i] = dep;
+			heapifyPrimary( i );
+		}
+
+		/** Swim and heapify the secondary heap.
+		 *  @param i The index to be moved.
+		 */
+		private void swimSecondary( int i ) {
+			int dep = secondary[i];
+			int parent;
+
+			while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) {
+				if ( secondaryComp.compare( refArray[secondary[parent]], refArray[dep] ) <= 0 ) break;
+				secondary[i] = secondary[parent]; inSec[secondary[i]] = i;
+				i = parent;
+			}
+			secondary[i] = dep; inSec[secondary[i]] = i;
+			heapifySecondary( i );
+		}
+
+		/** Returns the minimum element with respect to the primary comparator.
+			@return the minimum element.
+		*/
+		public int top() {
+			if ( n == 0 ) throw new java.util.NoSuchElementException();
+			return primary[0];
+		}
+
+		/** Returns the minimum element with respect to the secondary comparator.
+			@return the minimum element.
+		*/
+		public int secTop() {
+			if ( n == 0 ) throw new java.util.NoSuchElementException();
+			return secondary[0];
+		}
+
+		/** Removes the minimum element with respect to the primary comparator.
+		 */
+		public void remove() {
+			if ( n == 0 ) throw new java.util.NoSuchElementException();
+			int result = primary[0];
+			int ins = inSec[result];
+			inSec[ result ] = -1;
+			// Copy a leaf 
+			primary[0] = primary[n-1];
+			if ( ins == n-1 ) {
+				n--;
+				heapifyPrimary( 0 );	
+				return;
+			}
+			secondary[ins] = secondary[n-1]; 
+			inSec[secondary[ins]] = ins;
+			// Heapify
+			n--;
+			heapifyPrimary( 0 );
+			swimSecondary( ins );
+		}
+
+		public void clear() {
+			while( size() != 0 ) remove();
+		}
+
+		public void remove( int index ) {
+			if ( n == 0 ) throw new java.util.NoSuchElementException();
+			int result = primary[index];
+			int ins = inSec[result];
+			inSec[ result ] = -1;
+			// Copy a leaf 
+			primary[index] = primary[n-1];
+			if ( ins == n-1 ) {
+				n--;
+				swimPrimary( index );	
+				return;
+			}
+			secondary[ins] = secondary[n-1]; 
+			inSec[secondary[ins]] = ins;
+			// Heapify
+			n--;
+			swimPrimary( index );
+			swimSecondary( ins );
+		}
+
+		/** Signals that the minimum element with respect to the comparator has changed.
+		 */
+		public void change() {
+			int ins = inSec[primary[0]];
+			heapifyPrimary( 0 );
+			swimSecondary( ins );
+		}
+
+		public void change(int index) {
+			int ins = inSec[primary[index]];
+			swimPrimary( index );
+			swimSecondary( ins );
+		}
+
+		/** Returns the number of elements in the queue.
+		 *  @return the size of the queue
+		 */
+		public int size() {
+			return n;
+		}
+
+
+
+		public String toString() {
+			String s = "[";
+			for ( int i = 0; i < n; i++ )
+				s += refArray[primary[i]]+", ";
+			return s+ "]";
+		}
+	}
+
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif #keyclass(Object)
+		return Integer.toBinaryString( r.nextInt() );
+#else 
+		return new java.io.Serializable() {};
+#endif
+	}
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition p = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, p ).toString();
+	}
+
+	private static void speedTest( int n, boolean comp ) {
+		System.out.println( "There are presently no speed tests for this class." );
+	}
+
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	private static boolean heapEqual( int[] a, int[] b, int sizea, int sizeb ) {
+		if ( sizea != sizeb ) return false;
+		while( sizea-- != 0 ) if ( a[sizea] != b[sizea] ) return false;
+		return true;
+	}
+
+	private static boolean invEqual( int inva[], int[] invb ) {
+		int i = inva.length;
+		while( i-- != 0 ) if ( inva[ i ] != invb[ i ] ) return false;
+		return true;
+	}
+
+
+
+	protected static void test( int n ) {
+		long ms;
+		Exception mThrowsIllegal, tThrowsIllegal, mThrowsOutOfBounds, tThrowsOutOfBounds, mThrowsNoElement, tThrowsNoElement;
+		int rm = 0, rt = 0;
+		KEY_TYPE[] refArray = new KEY_TYPE[ n ];
+
+		for( int i = 0; i < n; i++ ) refArray[ i ] = genKey();
+		  
+		HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE m = new HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE( refArray );
+		TestQueue t = new TestQueue( refArray, COMPARATORS.NATURAL_COMPARATOR, COMPARATORS.OPPOSITE_COMPARATOR );
+
+		/* We add the first n/2 elements to both queues. */
+		for( int i = 0; i < n / 2;  i++ ) {
+			t.add( i );
+			m.enqueue( i );
+		}
+		
+		ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ in primary heap after creation (" + m + ", " + t + ")" );
+		ensure( heapEqual( m.secondaryQueue.heap, t.secondary, m.size(), t.size() ), "Error (" + seed + "): m and t differ in secondary heap after creation (" + m + ", " + t + ")" );
+		ensure( invEqual( m.secondaryQueue.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after creation (" + java.util.Arrays.toString( m.secondaryQueue.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" );
+
+		/* Now we add and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<2*n;  i++ ) {
+			if ( r.nextDouble() < 0.01 ) {
+				t.clear();
+				m.clear();
+				for( int j = 0; j < n / 2;  j++ ) {
+					t.add( j );
+					m.enqueue( j );
+				}
+			}
+
+			int T = r.nextInt( 2 * n );
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			try {
+				m.enqueue( T );
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = e; }
+
+			try {
+				t.add( T );
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): enqueue() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): enqueue() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+
+			ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ in primary heap after enqueue (" + m + ", " + t + ")" );
+			ensure( heapEqual( m.secondaryQueue.heap, t.secondary, m.size(), t.size() ), "Error (" + seed + "): m and t differ in secondary heap after enqueue (" + m + ", " + t + ")" );
+			ensure( invEqual( m.secondaryQueue.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after enqueue (" + java.util.Arrays.toString( m.secondaryQueue.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" );
+			
+			if ( m.size() != 0 ) {
+				ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after enqueue (" + m.first() + ", " + t.top() + ")");
+				ensure( m.secondaryFirst() == t.secTop(), "Error (" + seed + "): m and t differ in secondary first element after enqueue (" + m.secondaryFirst() + ", " + t.secTop() + ")");
+			}
+
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			try {
+				rm = m.dequeue();
+			}
+			catch ( IndexOutOfBoundsException e ) { mThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = e; }
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = e; }
+
+			try {
+				rt = t.top();
+				t.remove();
+			}
+			catch ( IndexOutOfBoundsException e ) { tThrowsOutOfBounds = e; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = e; }
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = e; }
+
+			ensure( ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ), "Error (" + seed + "): dequeue() divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" );
+			ensure( ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ), "Error (" + seed + "): dequeue() divergence in IllegalArgumentException  (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			ensure( ( mThrowsNoElement == null ) == ( tThrowsNoElement == null ), "Error (" + seed + "): dequeue() divergence in java.util.NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			if ( mThrowsOutOfBounds == null ) ensure( rt == rm , "Error (" + seed + "): divergence in dequeue() between t and m (" + rt + ", " + rm + ")" );
+
+			ensure( heapEqual( m.heap, t.primary, m.size(), t.size() ), "Error (" + seed + "): m and t differ in primary heap after dequeue (" + m + ", " + t + ")" );
+			ensure( heapEqual( m.secondaryQueue.heap, t.secondary, m.size(), t.size() ), "Error (" + seed + "): m and t differ in secondary heap after dequeue (" + m + ", " + t + ")" );
+			ensure( invEqual( m.secondaryQueue.inv, t.inSec ), "Error (" + seed + "): m and t differ in inversion arrays after dequeue (" + java.util.Arrays.toString( m.secondaryQueue.inv ) + ", " + java.util.Arrays.toString( t.inSec ) + ")" );
+
+			if ( m.size() != 0 ) {
+				ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after dequeue (" + m.first() + ", " + t.top() + ")");
+				ensure( m.secondaryFirst() == t.secTop(), "Error (" + seed + "): m and t differ in secondary first element after dequeue (" + m.secondaryFirst() + ", " + t.secTop() + ")");
+			}
+
+			if ( m.size() != 0 ) {
+				refArray[ m.first() ] = genKey();
+				
+				m.changed();
+				t.change();
+				
+				ensure( m.size() == t.size(), "Error (" + seed + "): m and t differ in size after change (" + m.size() + ", " + t.size() + ")");
+				
+				ensure( m.first() == t.top(), "Error (" + seed + "): m and t differ in first element after change (" + m.first() + ", " + t.top() + ")");
+				ensure( m.secondaryFirst() == t.secTop(), "Error (" + seed + "): m and t differ in secondary first element after change (" + m.secondaryFirst() + ", " + t.secTop() + ")");
+			}
+		}
+
+
+		/* Finally, we check that clear() leaves m empty. */
+		  
+		m.clear();
+		ensure( m.isEmpty(), "Error (" + seed + "): m is not empty after clear()" );
+
+		System.out.println("Test OK");
+	}
+
+
+
+	public static void main( String args[] ) {
+		int n  = Integer.parseInt(args[1]);
+		if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) );
+		  
+
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+
+#endif
+
+}
diff --git a/drv/Heaps.drv b/drv/Heaps.drv
new file mode 100644
index 0000000..55fea73
--- /dev/null
+++ b/drv/Heaps.drv
@@ -0,0 +1,118 @@
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+#if #keyclass(Object)
+import java.util.Comparator;
+#endif
+
+/** A class providing static methods and objects that do useful things with heaps.
+ *
+ * <P>The static methods of this class allow one to treat arrays as 0-based heaps. They
+ * are used in the implementation of heap-based queues, but they may also be used
+ * directly.
+ *
+ */
+
+public class HEAPS {
+
+	private HEAPS() {}
+
+	/** Moves the given element down into the heap until it reaches the lowest possible position.
+	 *
+	 * @param heap the heap (starting at 0).
+	 * @param size the number of elements in the heap.
+	 * @param i the index of the element that must be moved down.
+	 * @param c a type-specific comparator, or <code>null</code> for the natural order.
+	 * @return the new position of the element of index <code>i</code>.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public static KEY_GENERIC int downHeap( final KEY_GENERIC_TYPE[] heap, final int size, int i, final KEY_COMPARATOR KEY_SUPER_GENERIC c ) {
+		if ( i >= size ) throw new IllegalArgumentException( "Heap position (" + i + ") is larger than or equal to heap size (" + size + ")" );
+
+		final KEY_GENERIC_TYPE e = heap[ i ];
+		int child;
+
+		if ( c == null )
+			while ( ( child = 2 * i + 1 ) < size ) {
+				if ( child + 1 < size && KEY_LESS( heap[ child + 1 ], heap[ child ] ) ) child++;
+				if ( KEY_LESSEQ( e, heap[ child ] ) ) break;
+				heap[ i ] = heap[ child ];
+				i = child;
+			}
+		else 
+			while ( ( child = 2 * i + 1 ) < size ) {
+				if ( child + 1 < size && c.compare( heap[ child + 1 ], heap[ child ] ) < 0 ) child++;
+				if ( c.compare( e, heap[ child ] ) <= 0 ) break;
+				heap[ i ] = heap[ child ];
+				i = child;
+			}
+
+		heap[ i ] = e;
+
+		return i;
+	}
+
+	/** Moves the given element up in the heap until it reaches the highest possible position.
+	 *
+	 * @param heap the heap (starting at 0).
+	 * @param size the number of elements in the heap.
+	 * @param i the index of the element that must be moved up.
+	 * @param c a type-specific comparator, or <code>null</code> for the natural order.
+	 * @return the new position of the element of index <code>i</code>.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public static KEY_GENERIC int upHeap( final KEY_GENERIC_TYPE[] heap, final int size, int i, final KEY_COMPARATOR KEY_GENERIC c ) {
+		if ( i >= size ) throw new IllegalArgumentException( "Heap position (" + i + ") is larger than or equal to heap size (" + size + ")" );
+
+		final KEY_GENERIC_TYPE e = heap[ i ];
+		int parent;
+
+		if ( c == null )
+			while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) {
+				if ( KEY_LESSEQ( heap[ parent ], e ) ) break;
+				heap[ i ] = heap[ parent ]; 
+				i = parent;
+			}
+		else
+			while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) {
+				if ( c.compare( heap[ parent ], e ) <= 0 ) break;
+				heap[ i ] = heap[ parent ]; 
+				i = parent;
+			}
+
+		heap[ i ] = e;
+
+		return i;
+	}
+
+	/** Makes an array into a heap.
+	 *
+	 * @param heap the heap (starting at 0).
+	 * @param size the number of elements in the heap.
+	 * @param c a type-specific comparator, or <code>null</code> for the natural order.
+	 */
+
+	public static KEY_GENERIC void makeHeap( final KEY_GENERIC_TYPE[] heap, final int size, final KEY_COMPARATOR KEY_GENERIC c ) {
+		int i = size / 2;
+		while( i-- != 0 ) downHeap( heap, size, i, c );
+	}
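+
+	/* Sketch of direct use of the static methods above (illustrative only): an
+	 * array can be heapified once and then consumed in nondecreasing order by
+	 * repeatedly moving the last element to the root and sifting it down.
+	 *
+	 *     KEY_TYPE[] a = ...;
+	 *     int size = a.length;
+	 *     makeHeap( a, size, null );                    // natural order
+	 *     while( size != 0 ) {
+	 *         KEY_TYPE min = a[ 0 ];                    // current minimum
+	 *         a[ 0 ] = a[ --size ];
+	 *         if ( size != 0 ) downHeap( a, size, 0, null );
+	 *     }
+	 *
+	 * Insertion works symmetrically: append the new element at position size,
+	 * increment size, and call upHeap( a, size, size - 1, null ).
+	 */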
+
+}
diff --git a/drv/IndirectDoublePriorityQueue.drv b/drv/IndirectDoublePriorityQueue.drv
new file mode 100644
index 0000000..ed42b2e
--- /dev/null
+++ b/drv/IndirectDoublePriorityQueue.drv
@@ -0,0 +1,37 @@
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.IndirectDoublePriorityQueue;
+
+/** A type-specific {@link IndirectDoublePriorityQueue}. 
+ *
+ * <P>Additionally, this interface strengthens {@link #comparator()}.
+ */
+
+public interface INDIRECT_DOUBLE_PRIORITY_QUEUE extends INDIRECT_PRIORITY_QUEUE, IndirectDoublePriorityQueue<KEY_CLASS> {
+
+	/** Returns the secondary comparator associated with this queue.
+	 *
+	 * Note that this specification strengthens the one given in {@link IndirectDoublePriorityQueue}.
+	 *
+	 * @return the secondary comparator associated with this queue.
+	 * @see IndirectDoublePriorityQueue#comparator()
+	 */
+	KEY_COMPARATOR secondaryComparator();
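+
+	/* Note (illustrative): the strengthening means that the return type is
+	 * narrowed to the type-specific comparator, so callers obtain a
+	 * KEY_COMPARATOR directly and need no cast, e.g.
+	 *
+	 *     KEY_COMPARATOR d = queue.secondaryComparator();
+	 */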
+}
diff --git a/drv/IndirectHeaps.drv b/drv/IndirectHeaps.drv
new file mode 100644
index 0000000..5c6cb49
--- /dev/null
+++ b/drv/IndirectHeaps.drv
@@ -0,0 +1,159 @@
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+#if #keyclass(Object)
+import java.util.Comparator;
+#endif
+
+import it.unimi.dsi.fastutil.ints.IntArrays;
+
+/** A class providing static methods and objects that do useful things with indirect heaps.
+ *
+ * <P>An indirect heap is an extension of a semi-indirect heap that also uses an
+ * <em>inversion array</em> of the same length as the reference array,
+ * satisfying the relations <code>heap[inv[i]]==i</code> whenever
+ * <code>inv[i]>=0</code>, and <code>inv[heap[i]]==i</code> for all elements in the heap.
+ */
+
+public class INDIRECT_HEAPS {
+
+	private INDIRECT_HEAPS() {}
+
+	/** Moves the given element down into the indirect heap until it reaches the lowest possible position.
+	 *
+	 * @param refArray the reference array.
+	 * @param heap the indirect heap (starting at 0).
+	 * @param inv the inversion array.
+	 * @param size the number of elements in the heap.
+	 * @param i the index in the heap of the element to be moved down.
+	 * @param c a type-specific comparator, or <code>null</code> for the natural order.
+	 * @return the new position in the heap of the element of heap index <code>i</code>.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public static KEY_GENERIC int downHeap( final KEY_GENERIC_TYPE[] refArray, final int[] heap, final int[] inv, final int size, int i, final KEY_COMPARATOR KEY_GENERIC c ) {
+		if ( i >= size ) throw new IllegalArgumentException( "Heap position (" + i + ") is larger than or equal to heap size (" + size + ")" );
+
+		final int e = heap[ i ];
+		final KEY_GENERIC_TYPE E = refArray[ e ];
+		int child;
+
+		if ( c == null )
+			while ( ( child = 2 * i + 1 ) < size ) {
+				if ( child + 1 < size && KEY_LESS( refArray[ heap[ child + 1 ] ], refArray[ heap[ child ] ] ) ) child++;
+				if ( KEY_LESSEQ( E, refArray[ heap[ child ] ] ) ) break;
+				heap[ i ] = heap[ child ];
+				inv[ heap[ i ] ] = i;
+				i = child;
+			}
+		else 
+			while ( ( child = 2 * i + 1 ) < size ) {
+				if ( child + 1 < size && c.compare( refArray[ heap[ child + 1 ] ], refArray[ heap[ child ] ] ) < 0 ) child++;
+				if ( c.compare( E, refArray[ heap[ child ] ] ) <= 0 ) break;
+				heap[ i ] = heap[ child ];
+				inv[ heap[ i ] ] = i;
+				i = child;
+			}
+
+		heap[ i ] = e;
+		inv[ e ] = i;
+		return i;
+	}
+
+	/** Moves the given element up in the indirect heap until it reaches the highest possible position.
+	 *
+	 * Note that in principle after this call the heap property may be violated.
+	 * 
+	 * @param refArray the reference array.
+	 * @param heap the indirect heap (starting at 0).
+	 * @param inv the inversion array.
+	 * @param size the number of elements in the heap.
+	 * @param i the index in the heap of the element to be moved up.
+	 * @param c a type-specific comparator, or <code>null</code> for the natural order.
+	 * @return the new position in the heap of the element of heap index <code>i</code>.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public static KEY_GENERIC int upHeap( final KEY_GENERIC_TYPE[] refArray, final int[] heap, final int[] inv, final int size, int i, final KEY_COMPARATOR KEY_GENERIC c ) {
+		if ( i >= size ) throw new IllegalArgumentException( "Heap position (" + i + ") is larger than or equal to heap size (" + size + ")" );
+
+		final int e = heap[ i ];
+		final KEY_GENERIC_TYPE E = refArray[ e ];
+		int parent;
+
+		if ( c == null )
+			while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) {
+				if ( KEY_LESSEQ( refArray[ heap[ parent ] ], E ) ) break;
+				heap[ i ] = heap[ parent ];
+				inv[ heap[ i ] ] = i;
+				i = parent;
+			}
+		else
+			while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) {
+				if ( c.compare( refArray[ heap[ parent ] ], E ) <= 0 ) break;
+				heap[ i ] = heap[ parent ]; 
+				inv[ heap[ i ] ] = i;
+				i = parent;
+			}
+
+		heap[ i ] = e;
+		inv[ e ] = i;
+
+		return i;
+	}
+
+	/** Creates an indirect heap in the given array.
+	 *
+	 * @param refArray the reference array.
+	 * @param offset the first element of the reference array to be put in the heap.
+	 * @param length the number of elements to be put in the heap.
+	 * @param heap the array where the heap is to be created.
+	 * @param inv the inversion array.
+	 * @param c a type-specific comparator, or <code>null</code> for the natural order.
+	 */
+
+	public static KEY_GENERIC void makeHeap( final KEY_GENERIC_TYPE[] refArray, final int offset, final int length, final int[] heap, final int[] inv, final KEY_COMPARATOR KEY_GENERIC c ) {
+		ARRAYS.ensureOffsetLength( refArray, offset, length );
+		if ( heap.length < length ) throw new IllegalArgumentException( "The heap length (" + heap.length + ") is smaller than the number of elements (" + length + ")" );
+		if ( inv.length < refArray.length ) throw new IllegalArgumentException( "The inversion array length (" + inv.length + ") is smaller than the length of the reference array (" + refArray.length + ")" );
+
+		IntArrays.fill( inv, 0, refArray.length, -1 );
+
+		int i = length;
+		while( i-- != 0 ) inv[ heap[ i ] = offset + i ] = i;
+
+		i = length / 2;
+		while( i-- != 0 ) downHeap( refArray, heap, inv, length, i, c );
+	}
+
+
+	/** Creates an indirect heap from a given index array.
+	 *
+	 * @param refArray the reference array.
+	 * @param heap an array containing indices into <code>refArray</code>.
+	 * @param inv the inversion array.
+	 * @param size the number of elements in the heap.
+	 * @param c a type-specific comparator, or <code>null</code> for the natural order.
+	 */
+
+	public static KEY_GENERIC void makeHeap( final KEY_GENERIC_TYPE[] refArray, final int[] heap, final int[] inv, final int size, final KEY_COMPARATOR KEY_GENERIC c ) {
+		int i = size / 2;
+		while( i-- != 0 ) downHeap( refArray, heap, inv, size, i, c );
+	}
+}
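
A minimal sketch of how the inversion array is used in practice, assuming the generated double specialization it.unimi.dsi.fastutil.doubles.DoubleIndirectHeaps produced from this driver (the wrapper class is illustrative only): a priority change at refArray[ k ] can be repaired through inv[ k ] without searching the heap.

import it.unimi.dsi.fastutil.doubles.DoubleIndirectHeaps;

public class IndirectHeapsExample {
	public static void main( String[] args ) {
		final double[] refArray = { 3.0, 1.0, 2.0 };
		final int[] heap = new int[ refArray.length ];
		final int[] inv = new int[ refArray.length ];

		// Heapify indices 0..2 of refArray; heap[ 0 ] holds the index of the minimum.
		DoubleIndirectHeaps.makeHeap( refArray, 0, refArray.length, heap, inv, null );
		System.out.println( heap[ 0 ] ); // prints 1 (refArray[ 1 ] == 1.0)

		// Decrease the priority of element 2 and repair the heap via its position inv[ 2 ].
		refArray[ 2 ] = 0.5;
		DoubleIndirectHeaps.upHeap( refArray, heap, inv, refArray.length, inv[ 2 ], null );
		System.out.println( heap[ 0 ] ); // prints 2 (refArray[ 2 ] == 0.5)
	}
}
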
diff --git a/drv/IndirectPriorityQueue.drv b/drv/IndirectPriorityQueue.drv
new file mode 100644
index 0000000..53ed436
--- /dev/null
+++ b/drv/IndirectPriorityQueue.drv
@@ -0,0 +1,37 @@
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.IndirectPriorityQueue;
+
+/** A type-specific {@link IndirectPriorityQueue}. 
+ *
+ * <P>Additionally, this interface strengthens {@link #comparator()}.
+ */
+
+public interface INDIRECT_PRIORITY_QUEUE extends IndirectPriorityQueue<KEY_CLASS> {
+
+    /** Returns the comparator associated with this queue.
+	 *
+	 * Note that this specification strengthens the one given in {@link IndirectPriorityQueue}.
+	 *
+	 * @return the comparator associated with this queue.
+	 * @see IndirectPriorityQueue#comparator()
+	 */
+	KEY_COMPARATOR comparator();
+}
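
As a concrete sketch of this interface, assuming the array-based int implementation it.unimi.dsi.fastutil.ints.IntArrayIndirectPriorityQueue generated from the drivers in this commit: the queue stores indices into a reference array, and comparator() is strengthened to return a type-specific IntComparator (null here, i.e. the natural order).

import it.unimi.dsi.fastutil.ints.IntArrayIndirectPriorityQueue;

public class IndirectQueueExample {
	public static void main( String[] args ) {
		final int[] refArray = { 40, 10, 30 };
		final IntArrayIndirectPriorityQueue q = new IntArrayIndirectPriorityQueue( refArray );

		// The queue holds indices into refArray, not the values themselves.
		q.enqueue( 0 );
		q.enqueue( 1 );
		q.enqueue( 2 );

		System.out.println( q.dequeue() ); // prints 1, the index of the smallest value (10)
		System.out.println( q.dequeue() ); // prints 2, the index of 30
	}
}
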
diff --git a/drv/Iterable.drv b/drv/Iterable.drv
new file mode 100644
index 0000000..ea716fd
--- /dev/null
+++ b/drv/Iterable.drv
@@ -0,0 +1,40 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.lang.Iterable;
+
+/** A type-specific {@link Iterable} that strengthens the specification of {@link Iterable#iterator()}.
+ *
+ * <p><strong>Warning</strong>: Java will let you write “colon” <code>for</code> statements with primitive-type
+ * loop variables; however, what is (unfortunately) really happening is that at each iteration an
+ * unboxing (and, in the case of <code>fastutil</code> type-specific data structures, a boxing) will be performed. Watch out.
+ *
+ * @see Iterable
+ */
+
+public interface KEY_ITERABLE KEY_GENERIC extends Iterable<KEY_GENERIC_CLASS> {
+
+	/** Returns a type-specific iterator.
+	 *
+	 * Note that this specification strengthens the one given in {@link Iterable#iterator()}.
+	 *
+	 * @return a type-specific iterator.
+	 */
+	KEY_ITERATOR KEY_GENERIC iterator();
+}
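
The boxing warning above is easy to see with the generated int specializations (IntIterable, IntIterator and IntArrayList, assumed from the drivers in this commit): the colon for loop compiles, but every step boxes and unboxes, while the explicit type-specific iterator does not.

import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.ints.IntIterator;
import it.unimi.dsi.fastutil.ints.IntList;

public class IterableExample {
	public static void main( String[] args ) {
		final IntList l = IntArrayList.wrap( new int[] { 1, 2, 3 } );

		long sum = 0;
		for( int x : l ) sum += x; // convenient, but boxes/unboxes at every iteration

		long sum2 = 0;
		for( IntIterator i = l.iterator(); i.hasNext(); ) sum2 += i.nextInt(); // no boxing

		System.out.println( sum + " " + sum2 ); // prints 6 6
	}
}
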
diff --git a/drv/Iterator.drv b/drv/Iterator.drv
new file mode 100644
index 0000000..047c8a5
--- /dev/null
+++ b/drv/Iterator.drv
@@ -0,0 +1,57 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.Iterator;
+
+/** A type-specific {@link Iterator}; provides an additional method to avoid (un)boxing, and
+ * the possibility to skip elements.
+ *
+ * @see Iterator
+ */
+
+public interface KEY_ITERATOR KEY_GENERIC extends Iterator<KEY_GENERIC_CLASS> {
+
+
+#if #keys(primitive)
+
+	/**
+	 * Returns the next element as a primitive type.
+	 *
+	 * @return the next element in the iteration.
+	 * @see Iterator#next()
+	 */
+
+	KEY_TYPE NEXT_KEY();
+
+#endif
+
+
+	/** Skips the given number of elements.
+	 *
+	 * <P>The effect of this call is exactly the same as that of
+	 * calling {@link #next()} <code>n</code> times (possibly stopping
+	 * if {@link #hasNext()} becomes false).
+	 *
+	 * @param n the number of elements to skip.
+	 * @return the number of elements actually skipped.
+	 * @see Iterator#next()
+	 */
+
+	int skip( int n );
+}
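
A small sketch of the skip() contract, using the generated int specialization assumed from the drivers in this commit: skipping behaves like calling next() repeatedly, stops early when the iterator is exhausted, and reports how many elements were actually skipped.

import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.ints.IntIterator;

public class SkipExample {
	public static void main( String[] args ) {
		final IntIterator i = IntArrayList.wrap( new int[] { 10, 20, 30, 40 } ).iterator();

		System.out.println( i.skip( 2 ) );  // prints 2
		System.out.println( i.nextInt() );  // prints 30
		System.out.println( i.skip( 10 ) ); // prints 1: only one element was left
	}
}
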
diff --git a/drv/Iterators.drv b/drv/Iterators.drv
new file mode 100644
index 0000000..38bebf1
--- /dev/null
+++ b/drv/Iterators.drv
@@ -0,0 +1,838 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.Iterator;
+import java.util.ListIterator;
+import java.util.NoSuchElementException;
+
+/** A class providing static methods and objects that do useful things with type-specific iterators.
+ *
+ * @see Iterator
+ */
+
+public class ITERATORS {
+
+	private ITERATORS() {}
+	
+	/** A class returning no elements and a type-specific iterator interface.
+	 *
+	 * <P>This class may be useful to implement your own in case you subclass
+	 * a type-specific iterator.
+	 */
+
+	public static class EmptyIterator KEY_GENERIC extends KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC implements java.io.Serializable, Cloneable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected EmptyIterator() {}
+
+		public boolean hasNext() { return false; }
+		public boolean hasPrevious() { return false; }
+		public KEY_GENERIC_TYPE NEXT_KEY() { throw new NoSuchElementException(); }
+		public KEY_GENERIC_TYPE PREV_KEY() { throw new NoSuchElementException(); }
+		public int nextIndex() { return 0; }
+		public int previousIndex() { return -1; }
+		public int skip( int n ) { return 0; }
+		public int back( int n ) { return 0; }
+		public Object clone() { return EMPTY_ITERATOR; }
+		private Object readResolve() { return EMPTY_ITERATOR; }
+	}
+
+	/** An empty iterator (immutable). It is serializable and cloneable.
+	 *
+	 * <P>The class of this object represents an abstract empty iterator
+	 * that can iterate as a type-specific (list) iterator.
+	 */
+
+	@SuppressWarnings("rawtypes")
+	public final static EmptyIterator EMPTY_ITERATOR = new EmptyIterator();
+
+
+	/** An iterator returning a single element. */
+
+	private static class SingletonIterator KEY_GENERIC extends KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC {
+		private final KEY_GENERIC_TYPE element;
+		private int curr;
+
+		public SingletonIterator( final KEY_GENERIC_TYPE element ) {
+			this.element = element;
+		}
+	
+		public boolean hasNext() { return curr == 0; }
+		public boolean hasPrevious() { return curr == 1; }
+
+		public KEY_GENERIC_TYPE NEXT_KEY() {
+			if ( ! hasNext() ) throw new NoSuchElementException();
+			curr = 1;
+			return element;
+		}
+
+		public KEY_GENERIC_TYPE PREV_KEY() {
+			if ( ! hasPrevious() ) throw new NoSuchElementException();
+			curr = 0;
+			return element;
+		}
+
+		public int nextIndex() {
+			return curr;
+		}
+
+		public int previousIndex() {
+			return curr - 1;
+		}
+	}
+
+
+	/** Returns an iterator that iterates just over the given element.
+	 *
+	 * @param element the only element to be returned by a type-specific list iterator.
+	 * @return  an iterator that iterates just over <code>element</code>.
+	 */
+	public static KEY_GENERIC KEY_LIST_ITERATOR KEY_GENERIC singleton( final KEY_GENERIC_TYPE element ) {
+		return new SingletonIterator KEY_GENERIC( element );
+	}
+
+
+	/** A class to wrap arrays in iterators. */
+
+	private static class ArrayIterator KEY_GENERIC extends KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC {
+		private final KEY_GENERIC_TYPE[] array;
+		private final int offset, length;
+		private int curr;
+
+		public ArrayIterator( final KEY_GENERIC_TYPE[] array, final int offset, final int length ) {
+			this.array = array;
+			this.offset = offset;
+			this.length = length;
+		}
+	
+		public boolean hasNext() { return curr < length; }
+		public boolean hasPrevious() { return curr > 0; }
+
+		public KEY_GENERIC_TYPE NEXT_KEY() {
+			if ( ! hasNext() ) throw new NoSuchElementException();
+			return array[ offset + curr++ ];
+		}
+
+		public KEY_GENERIC_TYPE PREV_KEY() {
+			if ( ! hasPrevious() ) throw new NoSuchElementException();
+			return array[ offset + --curr ];
+		}
+
+		public int skip( int n ) {
+			if ( n <= length - curr ) {
+				curr += n;
+				return n;
+			}
+			n = length - curr;
+			curr = length;
+			return n;
+		}
+
+		public int back( int n ) {
+			if ( n <= curr ) {
+				curr -= n;
+				return n;
+			}
+			n = curr;
+			curr = 0;
+			return n;
+		}
+
+		public int nextIndex() {
+			return curr;
+		}
+
+		public int previousIndex() {
+			return curr - 1;
+		}
+	}
+
+
+	/** Wraps the given part of an array into a type-specific list iterator.
+	 *
+	 * <P>The type-specific list iterator returned by this method will iterate
+	 * <code>length</code> times, returning consecutive elements of the given
+	 * array starting from the one with index <code>offset</code>.
+	 *
+	 * @param array an array to wrap into a type-specific list iterator.
+	 * @param offset the first element of the array to be returned.
+	 * @param length the number of elements to return.
+	 * @return a type-specific list iterator over the given portion of the array.
+	 */
+	public static KEY_GENERIC KEY_LIST_ITERATOR KEY_GENERIC wrap( final KEY_GENERIC_TYPE[] array, final int offset, final int length ) {
+		ARRAYS.ensureOffsetLength( array, offset, length );
+		return new ArrayIterator KEY_GENERIC( array, offset, length );
+	}
+
+	/** Wraps the given array into a type-specific list iterator.
+	 *
+	 * <P>The type-specific list iterator returned by this method will return
+	 * all elements of the given array.
+	 *
+	 * @param array an array to wrap into a type-specific list iterator.
+	 * @return a type-specific list iterator over the array.
+	 */
+	public static KEY_GENERIC KEY_LIST_ITERATOR KEY_GENERIC wrap( final KEY_GENERIC_TYPE[] array ) {
+		return new ArrayIterator KEY_GENERIC( array, 0, array.length );
+	}
+
+
+	/** Unwraps an iterator into an array starting at a given offset for a given number of elements.
+	 *
+	 * <P>This method iterates over the given type-specific iterator and stores the elements
+	 * returned, up to a maximum of <code>max</code>, in the given array starting at <code>offset</code>.
+	 * The number of actually unwrapped elements is returned (it may be less than <code>max</code> if
+	 * the iterator emits less than <code>max</code> elements).
+	 *
+	 * @param i a type-specific iterator.
+	 * @param array an array to contain the output of the iterator.
+	 * @param offset the index of the first array element where elements will be stored.
+	 * @param max the maximum number of elements to unwrap.
+	 * @return the number of elements unwrapped.
+	 */
+	public static KEY_GENERIC int unwrap( final STD_KEY_ITERATOR KEY_EXTENDS_GENERIC i, final KEY_GENERIC_TYPE array[], int offset, final int max ) {
+		if ( max < 0 ) throw new IllegalArgumentException( "The maximum number of elements (" + max + ") is negative" );
+		if ( offset < 0 || offset + max > array.length ) throw new IllegalArgumentException();
+		int j = max;
+		while( j-- != 0 && i.hasNext() ) array[ offset++ ] = i.NEXT_KEY();
+		return max - j - 1;
+	}
+
+	/** Unwraps an iterator into an array.
+	 *
+	 * <P>This method iterates over the given type-specific iterator and stores the
+	 * elements returned in the given array. The iteration will stop when the
+	 * iterator has no more elements or when the end of the array has been reached.
+	 *
+	 * @param i a type-specific iterator.
+	 * @param array an array to contain the output of the iterator.
+	 * @return the number of elements unwrapped.
+	 */
+	public static KEY_GENERIC int unwrap( final STD_KEY_ITERATOR KEY_EXTENDS_GENERIC i, final KEY_GENERIC_TYPE array[] ) {
+		return unwrap( i, array, 0, array.length );
+	}
+
+	/** Unwraps an iterator, returning an array, with a limit on the number of elements.
+	 *
+	 * <P>This method iterates over the given type-specific iterator and returns an array
+	 * containing the elements returned by the iterator. At most <code>max</code> elements
+	 * will be returned.
+	 *
+	 * @param i a type-specific iterator.
+	 * @param max the maximum number of elements to be unwrapped.
+	 * @return an array containing the elements returned by the iterator (at most <code>max</code>).
+	 */
+	@SuppressWarnings("unchecked")
+	public static KEY_GENERIC KEY_GENERIC_TYPE[] unwrap( final STD_KEY_ITERATOR KEY_EXTENDS_GENERIC i, int max ) {
+		if ( max < 0 ) throw new IllegalArgumentException( "The maximum number of elements (" + max + ") is negative" );
+		KEY_GENERIC_TYPE array[] = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ 16 ];
+		int j = 0;
+
+		while( max-- != 0 && i.hasNext() ) {
+			if ( j == array.length ) array = ARRAYS.grow( array, j + 1 );
+			array[ j++ ] = i.NEXT_KEY();
+		}
+
+		return ARRAYS.trim( array, j );
+	}
+
+
+	/** Unwraps an iterator, returning an array.
+	 *
+	 * <P>This method iterates over the given type-specific iterator and returns an array
+	 * containing the elements returned by the iterator.
+	 *
+	 * @param i a type-specific iterator.
+	 * @return an array containing the elements returned by the iterator.
+	 */
+
+	public static KEY_GENERIC KEY_GENERIC_TYPE[] unwrap( final STD_KEY_ITERATOR KEY_EXTENDS_GENERIC i ) {
+		return unwrap( i, Integer.MAX_VALUE );
+	}
+
+
+	/** Unwraps an iterator into a type-specific collection,  with a limit on the number of elements.
+	 *
+	 * <P>This method iterates over the given type-specific iterator and stores the elements
+	 * returned, up to a maximum of <code>max</code>, in the given type-specific collection.
+	 * The number of actually unwrapped elements is returned (it may be less than <code>max</code> if
+	 * the iterator emits less than <code>max</code> elements).
+	 *
+	 * @param i a type-specific iterator.
+	 * @param c a type-specific collection to contain the output of the iterator.
+	 * @param max the maximum number of elements to unwrap.
+	 * @return the number of elements unwrapped. Note that
+	 * this is the number of elements returned by the iterator, which is not necessarily the number
+	 * of elements that have been added to the collection (because of duplicates).
+	 */
+	public static KEY_GENERIC int unwrap( final STD_KEY_ITERATOR KEY_GENERIC i, final COLLECTION KEY_SUPER_GENERIC c, final int max ) {
+		if ( max < 0 ) throw new IllegalArgumentException( "The maximum number of elements (" + max + ") is negative" );
+		int j = max;
+		while( j-- != 0 && i.hasNext() ) c.add( i.NEXT_KEY() );
+		return max - j - 1;
+	}
+
+	/** Unwraps an iterator into a type-specific collection.
+	 *
+	 * <P>This method iterates over the given type-specific iterator and stores the
+	 * elements returned in the given type-specific collection. The returned count of
+	 * unwrapped elements is a long, so that this method also works with very large collections.
+	 *
+	 * @param i a type-specific iterator.
+	 * @param c a type-specific collection to contain the output of the iterator.
+	 * @return the number of elements unwrapped. Note that
+	 * this is the number of elements returned by the iterator, which is not necessarily the number
+	 * of elements that have been added to the collection (because of duplicates).
+	 */
+	public static KEY_GENERIC long unwrap( final STD_KEY_ITERATOR KEY_GENERIC i, final COLLECTION KEY_SUPER_GENERIC c ) {
+		long n = 0;
+		while( i.hasNext() ) {
+			c.add( i.NEXT_KEY() );
+			n++;
+		}
+		return n;
+	}
+
+
+	/** Pours an iterator into a type-specific collection, with a limit on the number of elements.
+	 *
+	 * <P>This method iterates over the given type-specific iterator and adds
+	 * the returned elements to the given collection (up to <code>max</code>).
+	 *
+	 * @param i a type-specific iterator.
+	 * @param s a type-specific collection.
+	 * @param max the maximum number of elements to be poured.
+	 * @return the number of elements poured. Note that
+	 * this is the number of elements returned by the iterator, which is not necessarily the number
+	 * of elements that have been added to the collection (because of duplicates).
+	 */
+
+	public static KEY_GENERIC int pour( final STD_KEY_ITERATOR KEY_GENERIC i, final COLLECTION KEY_SUPER_GENERIC s, final int max ) {
+		if ( max < 0 ) throw new IllegalArgumentException( "The maximum number of elements (" + max + ") is negative" );
+		int j = max;
+		while( j-- != 0 && i.hasNext() ) s.add( i.NEXT_KEY() );
+		return max - j - 1;
+	}
+
+	/** Pours an iterator into a type-specific collection.
+	 *
+	 * <P>This method iterates over the given type-specific iterator and adds
+	 * the returned elements to the given collection.
+	 *
+	 * @param i a type-specific iterator.
+	 * @param s a type-specific collection.
+	 * @return the number of elements poured. Note that
+	 * this is the number of elements returned by the iterator, which is not necessarily the number
+	 * of elements that have been added to the collection (because of duplicates).
+	 */
+
+	public static KEY_GENERIC int pour( final STD_KEY_ITERATOR KEY_GENERIC i, final COLLECTION KEY_SUPER_GENERIC s ) {
+		return pour( i, s, Integer.MAX_VALUE );
+	}
+
+	/** Pours an iterator, returning a type-specific list, with a limit on the number of elements.
+	 *
+	 * <P>This method iterates over the given type-specific iterator and returns
+	 * a type-specific list containing the returned elements (up to <code>max</code>). Iteration
+	 * on the returned list is guaranteed to produce the elements in the same order
+	 * in which they appeared in the iterator.
+	 *
+	 *
+	 * @param i a type-specific iterator.
+	 * @param max the maximum number of elements to be poured.
+	 * @return a type-specific list containing the returned elements, up to <code>max</code>.
+	 */
+
+	public static KEY_GENERIC LIST KEY_GENERIC pour( final STD_KEY_ITERATOR KEY_GENERIC i, int max ) {
+		final ARRAY_LIST KEY_GENERIC l = new ARRAY_LIST KEY_GENERIC();
+		pour( i, l, max );
+		l.trim();
+		return l;
+	}
+
+	/** Pours an iterator, returning a type-specific list.
+	 *
+	 * <P>This method iterates over the given type-specific iterator and returns
+	 * a list containing the returned elements. Iteration
+	 * on the returned list is guaranteed to produce the elements in the same order
+	 * in which they appeared in the iterator.
+	 *
+	 * @param i a type-specific iterator.
+	 * @return a type-specific list containing the returned elements.
+	 */
+
+	public static KEY_GENERIC LIST KEY_GENERIC pour( final STD_KEY_ITERATOR KEY_GENERIC i ) {
+		return pour( i, Integer.MAX_VALUE );
+	}
+
+	private static class IteratorWrapper KEY_GENERIC extends KEY_ABSTRACT_ITERATOR KEY_GENERIC {
+		final Iterator<KEY_GENERIC_CLASS> i;
+
+		public IteratorWrapper( final Iterator<KEY_GENERIC_CLASS> i ) {
+			this.i = i;
+		}
+
+		public boolean hasNext() { return i.hasNext(); }
+		public void remove() { i.remove(); }
+
+		public KEY_GENERIC_TYPE NEXT_KEY() { return KEY_CLASS2TYPE( i.next() ); }
+	}
+
+	/** Wraps a standard iterator into a type-specific iterator.
+	 *
+	 * <P>This method wraps a standard iterator into a type-specific one which will handle the
+	 * type conversions for you. Of course, any attempt to wrap an iterator returning the
+	 * instances of the wrong class will generate a {@link ClassCastException}. The
+	 * returned iterator is backed by <code>i</code>: changes to one of the iterators
+	 * will affect the other, too.
+	 *
+	 * <P>If <code>i</code> is already type-specific, it will be returned and no new object
+	 * will be generated.
+	 *
+	 * @param i an iterator.
+	 * @return a type-specific iterator  backed by <code>i</code>.
+	 */
+	@SuppressWarnings({ "rawtypes", "unchecked" })
+ 	public static KEY_GENERIC KEY_ITERATOR KEY_GENERIC AS_KEY_ITERATOR( final Iterator KEY_GENERIC i ) {
+		if ( i instanceof KEY_ITERATOR ) return (KEY_ITERATOR KEY_GENERIC)i;
+		return new IteratorWrapper KEY_GENERIC( i );
+	}
+
+
+	private static class ListIteratorWrapper KEY_GENERIC extends KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC {
+		final ListIterator<KEY_GENERIC_CLASS> i;
+
+		public ListIteratorWrapper( final ListIterator<KEY_GENERIC_CLASS> i ) {
+			this.i = i;
+		}
+
+		public boolean hasNext() { return i.hasNext(); }
+		public boolean hasPrevious() { return i.hasPrevious(); }
+		public int nextIndex() { return i.nextIndex(); }
+		public int previousIndex() { return i.previousIndex(); }
+		@SuppressWarnings("unchecked")
+		public void set( KEY_GENERIC_TYPE k ) { i.set( KEY2OBJ( k ) ); }
+		@SuppressWarnings("unchecked")
+		public void add( KEY_GENERIC_TYPE k ) { i.add( KEY2OBJ( k ) ); }
+		public void remove() { i.remove(); }
+
+		public KEY_GENERIC_TYPE NEXT_KEY() { return KEY_CLASS2TYPE( i.next() ); }
+		public KEY_GENERIC_TYPE PREV_KEY() { return KEY_CLASS2TYPE( i.previous() ); }
+	}
+
+	/** Wraps a standard list iterator into a type-specific list iterator.
+	 *
+	 * <P>This method wraps a standard list iterator into a type-specific one
+	 * which will handle the type conversions for you. Of course, any attempt
+	 * to wrap an iterator returning the instances of the wrong class will
+	 * generate a {@link ClassCastException}. The
+	 * returned iterator is backed by <code>i</code>: changes to one of the iterators
+	 * will affect the other, too.
+	 *
+	 * <P>If <code>i</code> is already type-specific, it will be returned and no new object
+	 * will be generated.
+	 *
+	 * @param i a list iterator.
+	 * @return a type-specific list iterator backed by <code>i</code>.
+	 */
+	@SuppressWarnings({ "rawtypes", "unchecked" })
+ 	public static KEY_GENERIC KEY_LIST_ITERATOR KEY_GENERIC AS_KEY_ITERATOR( final ListIterator KEY_GENERIC i ) {
+		if ( i instanceof KEY_LIST_ITERATOR ) return (KEY_LIST_ITERATOR KEY_GENERIC)i;
+		return new ListIteratorWrapper KEY_GENERIC( i );
+	}
+
+
+#if #keyclass(Integer) || #keyclass(Byte) || #keyclass(Short) || #keyclass(Character) || #keyclass(Long)
+
+#if #keyclass(Long)
+	private static class IntervalIterator extends KEY_ABSTRACT_BIDI_ITERATOR {
+#else
+	private static class IntervalIterator extends KEY_ABSTRACT_LIST_ITERATOR {
+#endif
+		private final KEY_TYPE from, to;
+		KEY_TYPE curr;
+
+		public IntervalIterator( final KEY_TYPE from, final KEY_TYPE to ) {
+			this.from = this.curr = from;
+			this.to = to;
+		}
+
+		public boolean hasNext() { return curr < to; }
+		public boolean hasPrevious() { return curr > from; }
+
+		public KEY_TYPE NEXT_KEY() { 
+			if ( ! hasNext() ) throw new NoSuchElementException();
+			return curr++; 
+		}
+		public KEY_TYPE PREV_KEY() { 
+			if ( ! hasPrevious() ) throw new NoSuchElementException();
+			return --curr; 
+		}
+
+#if ! #keyclass(Long)
+		public int nextIndex() { return curr - from; }
+		public int previousIndex() { return curr - from - 1; }
+#endif
+
+		public int skip( int n ) {
+			if ( curr + n <= to ) {
+				curr += n;
+				return n;
+			}
+#if ! #keyclass(Long)
+			n = to - curr;
+#else
+			n = (int)( to - curr );
+#endif
+			curr = to;
+			return n;
+		}
+
+		public int back( int n ) {
+			if ( curr - n >= from ) {
+				curr -= n;
+				return n;
+			}
+#if ! #keyclass(Long)
+			n = curr - from ;
+#else
+			n = (int)( curr - from );
+#endif
+			curr = from;
+			return n;
+		}
+	}
+		
+#if #keyclass(Long)
+	/** Creates a type-specific bidirectional iterator over an interval.
+	 *
+	 * <P>The type-specific bidirectional iterator returned by this method will return the
+	 * elements <code>from</code>, <code>from+1</code>,…, <code>to-1</code>.
+	 *
+	 * <P>Note that all other type-specific interval iterators are <em>list</em>
+	 * iterators. Of course, this is not possible with longs as the index
+	 * returned by {@link java.util.ListIterator#nextIndex() nextIndex()}/{@link
+	 * java.util.ListIterator#previousIndex() previousIndex()} would exceed an integer.
+	 *
+	 * @param from the starting element (inclusive).
+	 * @param to the ending element (exclusive).
+	 * @return a type-specific bidirectional iterator enumerating the elements from <code>from</code> to <code>to</code>.
+	 */
+	public static KEY_BIDI_ITERATOR fromTo( final KEY_TYPE from, final KEY_TYPE to ) {
+		return new IntervalIterator( from, to );
+	}
+#else
+
+	/** Creates a type-specific list iterator over an interval.
+	 *
+	 * <P>The type-specific list iterator returned by this method will return the
+	 * elements <code>from</code>, <code>from+1</code>,…, <code>to-1</code>.
+	 *
+	 * @param from the starting element (inclusive).
+	 * @param to the ending element (exclusive).
+	 * @return a type-specific list iterator enumerating the elements from <code>from</code> to <code>to</code>.
+	 */
+	public static KEY_LIST_ITERATOR fromTo( final KEY_TYPE from, final KEY_TYPE to ) {
+		return new IntervalIterator( from, to );
+	}
+
+#endif
+
+#endif
+
+	private static class IteratorConcatenator KEY_GENERIC extends KEY_ABSTRACT_ITERATOR KEY_GENERIC {
+		final KEY_ITERATOR KEY_EXTENDS_GENERIC a[];
+		int offset, length, lastOffset = -1;
+
+		public IteratorConcatenator( final KEY_ITERATOR KEY_EXTENDS_GENERIC a[], int offset, int length ) {
+			this.a = a;
+			this.offset = offset;
+			this.length = length;
+			advance();
+		}
+
+		private void advance() {
+			while( length != 0 ) {
+				if ( a[ offset ].hasNext() ) break;
+				length--;
+				offset++;
+			}
+
+			return;
+		}
+
+		public boolean hasNext() {
+			return length > 0;
+		}
+
+		public KEY_GENERIC_TYPE NEXT_KEY() {
+			if ( ! hasNext() ) throw new NoSuchElementException();
+			KEY_GENERIC_TYPE next = a[ lastOffset = offset ].NEXT_KEY();
+			advance();
+			return next;
+		}
+
+		public void remove() {
+			if ( lastOffset == -1 ) throw new IllegalStateException();
+			a[ lastOffset ].remove();
+		}
+
+		public int skip( int n ) {
+			lastOffset = -1;
+
+			int skipped = 0;
+
+			while( skipped < n && length != 0 ) {
+				skipped += a[ offset ].skip( n - skipped );
+				if ( a[ offset ].hasNext() ) break;
+				length--;
+				offset++;
+			}
+			
+			return skipped;
+		}
+
+	}
+
+
+	/** Concatenates all iterators contained in an array.
+	 *
+	 * <P>This method returns an iterator that will enumerate in order the elements returned
+	 * by all iterators contained in the given array.
+	 *
+	 * @param a an array of iterators.
+	 * @return an iterator obtained by concatenation.
+	 */
+
+	public static KEY_GENERIC KEY_ITERATOR KEY_GENERIC concat( final KEY_ITERATOR KEY_EXTENDS_GENERIC a[] ) {
+		return concat( a, 0, a.length );
+	}
+
+
+	/** Concatenates a sequence of iterators contained in an array.
+	 *
+	 * <P>This method returns an iterator that will enumerate in order the elements returned
+	 * by <code>a[ offset ]</code>, then those returned 
+	 * by <code>a[ offset + 1 ]</code>, and so on up to 
+	 * <code>a[ offset + length - 1 ]</code>. 
+	 *
+	 * @param a an array of iterators.
+	 * @param offset the index of the first iterator to concatenate.
+	 * @param length the number of iterators to concatenate.
+	 * @return an iterator obtained by concatenation of <code>length</code> elements of <code>a</code> starting at <code>offset</code>.
+	 */
+
+	public static KEY_GENERIC KEY_ITERATOR KEY_GENERIC concat( final KEY_ITERATOR KEY_EXTENDS_GENERIC a[], final int offset, final int length ) {
+		return new IteratorConcatenator KEY_GENERIC( a, offset, length );
+	}
+
+
+  	/** An unmodifiable wrapper class for iterators. */
+
+
+	public static class UnmodifiableIterator KEY_GENERIC extends KEY_ABSTRACT_ITERATOR KEY_GENERIC {
+		final protected KEY_ITERATOR KEY_GENERIC i;
+
+		@SuppressWarnings("unchecked")
+		public UnmodifiableIterator( final KEY_ITERATOR KEY_GENERIC i ) {
+			this.i = i;
+		}
+
+		public boolean hasNext() { return i.hasNext(); }
+
+		public KEY_GENERIC_TYPE NEXT_KEY() { return i.NEXT_KEY(); }
+#if #keys(primitive)
+		public KEY_GENERIC_CLASS next() { return i.next(); }
+#endif
+	}
+
+
+	/** Returns an unmodifiable iterator backed by the specified iterator.
+	 *
+	 * @param i the iterator to be wrapped in an unmodifiable iterator.
+	 * @return an unmodifiable view of the specified iterator.
+	 */
+	public static KEY_GENERIC KEY_ITERATOR KEY_GENERIC unmodifiable( final KEY_ITERATOR KEY_GENERIC i ) { return new UnmodifiableIterator KEY_GENERIC( i ); }
+
+
+
+  	/** An unmodifiable wrapper class for bidirectional iterators. */
+
+	public static class UnmodifiableBidirectionalIterator KEY_GENERIC extends KEY_ABSTRACT_BIDI_ITERATOR KEY_GENERIC {
+		final protected KEY_BIDI_ITERATOR KEY_GENERIC i;
+
+		@SuppressWarnings("unchecked")
+		public UnmodifiableBidirectionalIterator( final KEY_BIDI_ITERATOR KEY_GENERIC i ) {
+			this.i = i;
+		}
+
+		public boolean hasNext() { return i.hasNext(); }
+		public boolean hasPrevious() { return i.hasPrevious(); }
+		public KEY_GENERIC_TYPE NEXT_KEY() { return i.NEXT_KEY(); }
+		public KEY_GENERIC_TYPE PREV_KEY() { return i.PREV_KEY(); }
+#if #keys(primitive)
+		public KEY_GENERIC_CLASS next() { return i.next(); }
+		public KEY_GENERIC_CLASS previous() { return i.previous(); }
+#endif
+	}
+
+
+	/** Returns an unmodifiable bidirectional iterator backed by the specified bidirectional iterator.
+	 *
+	 * @param i the bidirectional iterator to be wrapped in an unmodifiable bidirectional iterator.
+	 * @return an unmodifiable view of the specified bidirectional iterator.
+	 */
+	public static KEY_GENERIC KEY_BIDI_ITERATOR KEY_GENERIC unmodifiable( final KEY_BIDI_ITERATOR KEY_GENERIC i ) { return new UnmodifiableBidirectionalIterator KEY_GENERIC( i ); }
+
+
+  	/** An unmodifiable wrapper class for list iterators. */
+
+	public static class UnmodifiableListIterator KEY_GENERIC extends KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC {
+		final protected KEY_LIST_ITERATOR KEY_GENERIC i;
+
+		@SuppressWarnings("unchecked")
+		public UnmodifiableListIterator( final KEY_LIST_ITERATOR KEY_GENERIC i ) {
+			this.i = i;
+		}
+
+		public boolean hasNext() { return i.hasNext(); }
+		public boolean hasPrevious() { return i.hasPrevious(); }
+		public KEY_GENERIC_TYPE NEXT_KEY() { return i.NEXT_KEY(); }
+		public KEY_GENERIC_TYPE PREV_KEY() { return i.PREV_KEY(); }
+		public int nextIndex() { return i.nextIndex(); }
+		public int previousIndex() { return i.previousIndex(); }
+#if #keys(primitive)
+		public KEY_GENERIC_CLASS next() { return i.next(); }
+		public KEY_GENERIC_CLASS previous() { return i.previous(); }
+#endif
+	}
+
+	/** Returns an unmodifiable list iterator backed by the specified list iterator.
+	 *
+	 * @param i the list iterator to be wrapped in an unmodifiable list iterator.
+	 * @return an unmodifiable view of the specified list iterator.
+	 */
+	public static KEY_GENERIC KEY_LIST_ITERATOR KEY_GENERIC unmodifiable( final KEY_LIST_ITERATOR KEY_GENERIC i ) { return new UnmodifiableListIterator KEY_GENERIC( i ); }
+
+#if #keyclass(Short) || #keyclass(Integer) || #keyclass(Long) || #keyclass(Float) || #keyclass(Double)
+
+  	/** A wrapper promoting the results of a ByteIterator. */
+
+	protected static class ByteIteratorWrapper implements KEY_ITERATOR {
+		final it.unimi.dsi.fastutil.bytes.ByteIterator iterator;
+		
+		public ByteIteratorWrapper( final it.unimi.dsi.fastutil.bytes.ByteIterator iterator ) {
+			this.iterator = iterator;
+		}
+	
+		public boolean hasNext() { return iterator.hasNext(); }
+		public KEY_GENERIC_CLASS next() { return KEY_GENERIC_CLASS.valueOf( iterator.nextByte() ); }
+		public KEY_TYPE NEXT_KEY() { return iterator.nextByte(); }
+		public void remove() { iterator.remove(); }
+		public int skip( final int n ) { return iterator.skip( n ); }
+	}
+
+	/** Returns an iterator backed by the specified byte iterator. 
+	 * @return an iterator backed by the specified byte iterator. 
+	 */
+	public static KEY_ITERATOR wrap( final it.unimi.dsi.fastutil.bytes.ByteIterator iterator ) {
+		return new ByteIteratorWrapper( iterator );
+	}
+#endif
+
+#if #keyclass(Integer) || #keyclass(Long) || #keyclass(Float) || #keyclass(Double)
+
+  	/** A wrapper promoting the results of a ShortIterator. */
+
+	protected static class ShortIteratorWrapper implements KEY_ITERATOR {
+		final it.unimi.dsi.fastutil.shorts.ShortIterator iterator;
+		
+		public ShortIteratorWrapper( final it.unimi.dsi.fastutil.shorts.ShortIterator iterator ) {
+			this.iterator = iterator;
+		}
+	
+		public boolean hasNext() { return iterator.hasNext(); }
+		public KEY_GENERIC_CLASS next() { return KEY_GENERIC_CLASS.valueOf( iterator.nextShort() ); }
+		public KEY_TYPE NEXT_KEY() { return iterator.nextShort(); }
+		public void remove() { iterator.remove(); }
+		public int skip( final int n ) { return iterator.skip( n ); }
+	}
+
+	/** Returns an iterator backed by the specified short iterator. 
+	 * @return an iterator backed by the specified short iterator. 
+	 */
+	public static KEY_ITERATOR wrap( final it.unimi.dsi.fastutil.shorts.ShortIterator iterator ) {
+		return new ShortIteratorWrapper( iterator );
+	}
+
+#endif
+
+#if #keyclass(Long) || #keyclass(Double)
+
+  	/** A wrapper promoting the results of an IntIterator. */
+
+	protected static class IntIteratorWrapper implements KEY_ITERATOR {
+		final it.unimi.dsi.fastutil.ints.IntIterator iterator;
+		
+		public IntIteratorWrapper( final it.unimi.dsi.fastutil.ints.IntIterator iterator ) {
+			this.iterator = iterator;
+		}
+	
+		public boolean hasNext() { return iterator.hasNext(); }
+		public KEY_GENERIC_CLASS next() { return KEY_GENERIC_CLASS.valueOf( iterator.nextInt() ); }
+		public KEY_TYPE NEXT_KEY() { return iterator.nextInt(); }
+		public void remove() { iterator.remove(); }
+		public int skip( final int n ) { return iterator.skip( n ); }
+	}
+
+	/** Returns an iterator backed by the specified integer iterator. 
+	 * @return an iterator backed by the specified integer iterator. 
+	 */
+
+	public static KEY_ITERATOR wrap( final it.unimi.dsi.fastutil.ints.IntIterator iterator ) {
+		return new IntIteratorWrapper( iterator );
+	}
+
+#endif
+
+#if #keyclass(Double)
+
+  	/** A wrapper promoting the results of a FloatIterator. */
+
+	protected static class FloatIteratorWrapper implements KEY_ITERATOR {
+		final it.unimi.dsi.fastutil.floats.FloatIterator iterator;
+		
+		public FloatIteratorWrapper( final it.unimi.dsi.fastutil.floats.FloatIterator iterator ) {
+			this.iterator = iterator;
+		}
+	
+		public boolean hasNext() { return iterator.hasNext(); }
+		public KEY_GENERIC_CLASS next() { return KEY_GENERIC_CLASS.valueOf( iterator.nextFloat() ); }
+		public KEY_TYPE NEXT_KEY() { return iterator.nextFloat(); }
+		public void remove() { iterator.remove(); }
+		public int skip( final int n ) { return iterator.skip( n ); }
+	}
+
+	/** Returns an iterator backed by the specified float iterator. 
+	 * @return an iterator backed by the specified float iterator. 
+	 */
+	public static KEY_ITERATOR wrap( final it.unimi.dsi.fastutil.floats.FloatIterator iterator ) {
+		return new FloatIteratorWrapper( iterator );
+	}
+#endif
+}
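
A compact sketch tying the static helpers above together, assuming the generated class it.unimi.dsi.fastutil.ints.IntIterators (wrap, fromTo, concat and unwrap are the methods defined in this driver):

import java.util.Arrays;

import it.unimi.dsi.fastutil.ints.IntIterator;
import it.unimi.dsi.fastutil.ints.IntIterators;

public class IteratorsExample {
	public static void main( String[] args ) {
		final IntIterator a = IntIterators.wrap( new int[] { 1, 2, 3 } );
		final IntIterator b = IntIterators.fromTo( 10, 13 ); // 10, 11, 12

		// Concatenate the two iterators and unwrap the result into a fresh array.
		final int[] all = IntIterators.unwrap( IntIterators.concat( new IntIterator[] { a, b } ) );
		System.out.println( Arrays.toString( all ) ); // prints [1, 2, 3, 10, 11, 12]
	}
}
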
diff --git a/drv/LinkedOpenCustomDoubleHashMap.drv b/drv/LinkedOpenCustomDoubleHashMap.drv
new file mode 120000
index 0000000..2a96034
--- /dev/null
+++ b/drv/LinkedOpenCustomDoubleHashMap.drv
@@ -0,0 +1 @@
+OpenDoubleHashMap.drv
\ No newline at end of file
diff --git a/drv/LinkedOpenCustomDoubleHashSet.drv b/drv/LinkedOpenCustomDoubleHashSet.drv
new file mode 120000
index 0000000..579ba65
--- /dev/null
+++ b/drv/LinkedOpenCustomDoubleHashSet.drv
@@ -0,0 +1 @@
+OpenDoubleHashSet.drv
\ No newline at end of file
diff --git a/drv/LinkedOpenCustomHashMap.drv b/drv/LinkedOpenCustomHashMap.drv
new file mode 120000
index 0000000..2da9b3a
--- /dev/null
+++ b/drv/LinkedOpenCustomHashMap.drv
@@ -0,0 +1 @@
+OpenHashMap.drv
\ No newline at end of file
diff --git a/drv/LinkedOpenCustomHashSet.drv b/drv/LinkedOpenCustomHashSet.drv
new file mode 120000
index 0000000..d00472c
--- /dev/null
+++ b/drv/LinkedOpenCustomHashSet.drv
@@ -0,0 +1 @@
+OpenHashSet.drv
\ No newline at end of file
diff --git a/drv/LinkedOpenDoubleHashMap.drv b/drv/LinkedOpenDoubleHashMap.drv
new file mode 120000
index 0000000..2a96034
--- /dev/null
+++ b/drv/LinkedOpenDoubleHashMap.drv
@@ -0,0 +1 @@
+OpenDoubleHashMap.drv
\ No newline at end of file
diff --git a/drv/LinkedOpenDoubleHashSet.drv b/drv/LinkedOpenDoubleHashSet.drv
new file mode 120000
index 0000000..579ba65
--- /dev/null
+++ b/drv/LinkedOpenDoubleHashSet.drv
@@ -0,0 +1 @@
+OpenDoubleHashSet.drv
\ No newline at end of file
diff --git a/drv/LinkedOpenHashMap.drv b/drv/LinkedOpenHashMap.drv
new file mode 120000
index 0000000..2da9b3a
--- /dev/null
+++ b/drv/LinkedOpenHashMap.drv
@@ -0,0 +1 @@
+OpenHashMap.drv
\ No newline at end of file
diff --git a/drv/LinkedOpenHashSet.drv b/drv/LinkedOpenHashSet.drv
new file mode 120000
index 0000000..d00472c
--- /dev/null
+++ b/drv/LinkedOpenHashSet.drv
@@ -0,0 +1 @@
+OpenHashSet.drv
\ No newline at end of file
diff --git a/drv/List.drv b/drv/List.drv
new file mode 100644
index 0000000..41d7dc3
--- /dev/null
+++ b/drv/List.drv
@@ -0,0 +1,210 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.List;
+
+#if ! #keyclass(Reference)
+
+/** A type-specific {@link List}; provides some additional methods that use polymorphism to avoid (un)boxing. 
+ *
+ * <P>Note that this type-specific interface extends {@link Comparable}: it is expected that implementing
+ * classes perform a lexicographical comparison using the standard operator "less than" for primitive types,
+ * and the usual {@link Comparable#compareTo(Object) compareTo()} method for objects.
+ *
+ * <P>Additionally, this interface strengthens {@link #listIterator()},
+ * {@link #listIterator(int)} and {@link #subList(int,int)}.
+ *
+ * <P>Besides polymorphic methods, this interface specifies methods to copy into an array or remove contiguous
+ * sublists. Although the abstract implementation of this interface provides simple, one-by-one implementations
+ * of these methods, it is expected that concrete implementations override them with optimized versions.
+ *
+ * @see List
+ */
+
+public interface LIST KEY_GENERIC extends List<KEY_GENERIC_CLASS>, Comparable<List<? extends KEY_GENERIC_CLASS>>, COLLECTION KEY_GENERIC {
+#else
+
+/** A type-specific {@link List}; provides some additional methods that use polymorphism to avoid (un)boxing. 
+ *
+ * <P>Additionally, this interface strengthens {@link #iterator()}, {@link #listIterator()},
+ * {@link #listIterator(int)} and {@link #subList(int,int)}. The former had been already
+ * strengthened upstream, but unfortunately {@link List} re-specifies it.
+ *
+ * <P>Besides polymorphic methods, this interface specifies methods to copy into an array or remove contiguous
+ * sublists. Although the abstract implementation of this interface provides simple, one-by-one implementations
+ * of these methods, it is expected that concrete implementations override them with optimized versions.
+ *
+ * @see List
+ */
+
+public interface LIST KEY_GENERIC extends List<KEY_GENERIC_CLASS>, COLLECTION KEY_GENERIC {
+#endif
+
+	/** Returns a type-specific iterator on the elements of this list (in proper sequence).
+	 *
+	 * Note that this specification strengthens the one given in {@link List#iterator()}.
+	 * It would not normally be necessary, but {@link java.lang.Iterable#iterator()} is bizarrely re-specified
+	 * in {@link List}.
+	 *
+	 * @return an iterator on the elements of this list (in proper sequence).
+	 */
+	KEY_LIST_ITERATOR KEY_GENERIC iterator();
+
+	/** Returns a type-specific list iterator on the list.
+	 *
+	 * @see #listIterator()
+	 * @deprecated As of <code>fastutil</code> 5, replaced by {@link #listIterator()}.
+	 */
+	@Deprecated
+	KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD();
+
+	/** Returns a type-specific list iterator on the list starting at a given index.
+	 *
+	 * @see #listIterator(int)
+	 * @deprecated As of <code>fastutil</code> 5, replaced by {@link #listIterator(int)}.
+	 */
+	@Deprecated
+	KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD( int index );
+
+	/** Returns a type-specific list iterator on the list.
+	 *
+	 * @see List#listIterator()
+	 */
+	KEY_LIST_ITERATOR KEY_GENERIC listIterator();
+
+	/** Returns a type-specific list iterator on the list starting at a given index.
+	 *
+	 * @see List#listIterator(int)
+	 */
+	KEY_LIST_ITERATOR KEY_GENERIC listIterator( int index );
+
+	/** Returns a type-specific view of the portion of this list from the index <code>from</code>, inclusive, to the index <code>to</code>, exclusive.
+	 * @see List#subList(int,int)
+	 * @deprecated As of <code>fastutil</code> 5, replaced by {@link #subList(int,int)}.
+	 */
+	@Deprecated
+	LIST KEY_GENERIC SUBLIST_METHOD( int from, int to );
+
+	/** Returns a type-specific view of the portion of this list from the index <code>from</code>, inclusive, to the index <code>to</code>, exclusive.
+	 *
+	 * <P>Note that this specification strengthens the one given in {@link List#subList(int,int)}.
+	 *
+	 * @see List#subList(int,int)
+	 */
+	LIST KEY_GENERIC subList(int from, int to);
+
+
+	/** Sets the size of this list.
+	 *
+	 * <P>If the specified size is smaller than the current size, the last elements are
+	 * discarded. Otherwise, the new elements are filled with 0/<code>null</code>/<code>false</code>.
+	 *
+	 * @param size the new size.
+	 */
+
+	void size( int size );
+
+	/** Copies (hopefully quickly) elements of this type-specific list into the given array.
+	 *
+	 * @param from the start index (inclusive).
+	 * @param a the destination array.
+	 * @param offset the offset into the destination array where to store the first element copied.
+	 * @param length the number of elements to be copied.
+	 */
+	void getElements( int from, KEY_TYPE a[], int offset, int length );
+
+	/** Removes (hopefully quickly) elements of this type-specific list.
+	 *
+	 * @param from the start index (inclusive).
+	 * @param to the end index (exclusive).
+	 */
+	void removeElements( int from, int to );
+
+	/** Adds (hopefully quickly) elements to this type-specific list.
+	 *
+	 * @param index the index at which to add elements.
+	 * @param a the array containing the elements.
+	 */
+	void addElements( int index, KEY_GENERIC_TYPE a[] );
+
+	/** Adds (hopefully quickly) elements to this type-specific list.
+	 *
+	 * @param index the index at which to add elements.
+	 * @param a the array containing the elements.
+	 * @param offset the offset of the first element to add.
+	 * @param length the number of elements to add.
+	 */
+	void addElements( int index, KEY_GENERIC_TYPE a[], int offset, int length );
+
+#if #keys(primitive)
+
+	/**
+	 * @see List#add(Object)
+	 */
+	boolean add( KEY_TYPE key );
+
+	/**
+	 * @see List#add(int,Object)
+	 */
+	void add( int index, KEY_TYPE key );
+
+	/**
+	 * @see List#addAll(int,java.util.Collection)
+	 */
+	boolean addAll( int index, COLLECTION c );
+
+	/**
+	 * @see List#addAll(int,java.util.Collection)
+	 */
+	boolean addAll( int index, LIST c );
+
+	/**
+	 * @see List#addAll(java.util.Collection)
+	 */
+	boolean addAll( LIST c );
+
+	/**
+	 * @see List#get(int)
+	 */
+	KEY_TYPE GET_KEY( int index );
+
+	/**
+	 * @see List#indexOf(Object)
+	 */
+	int indexOf( KEY_TYPE k );
+
+	/**
+	 * @see List#lastIndexOf(Object)
+	 */
+	int lastIndexOf( KEY_TYPE k );
+
+	/**
+	 * @see List#remove(int)
+	 */
+	KEY_TYPE REMOVE_KEY( int index );
+
+	/**
+	 * @see List#set(int,Object)
+	 */
+	KEY_TYPE set( int index, KEY_TYPE k );
+
+#endif
+
+
+}
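
A short sketch of the bulk methods specified above, using the generated int specialization IntArrayList (assumed from the ArrayList driver in this commit): getElements copies a slice into an array, removeElements drops a contiguous range, and size(int) truncates or pads the list.

import java.util.Arrays;

import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.ints.IntList;

public class ListExample {
	public static void main( String[] args ) {
		final IntList l = IntArrayList.wrap( new int[] { 1, 2, 3, 4, 5 } );

		final int[] slice = new int[ 3 ];
		l.getElements( 1, slice, 0, 3 );                // copies 2, 3, 4
		System.out.println( Arrays.toString( slice ) ); // prints [2, 3, 4]

		l.removeElements( 0, 2 ); // drops 1 and 2
		l.size( 5 );              // pads back to five elements with zeroes
		System.out.println( l );  // the list now contains 3, 4, 5, 0, 0
	}
}
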
diff --git a/drv/ListIterator.drv b/drv/ListIterator.drv
new file mode 100644
index 0000000..28ad318
--- /dev/null
+++ b/drv/ListIterator.drv
@@ -0,0 +1,40 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.ListIterator;
+
+/** A type-specific bidirectional iterator that is also a {@link ListIterator}.
+ *
+ * <P>This interface merges the methods provided by a {@link ListIterator} and
+ * a type-specific {@link it.unimi.dsi.fastutil.BidirectionalIterator}. Moreover, it provides
+ * type-specific versions of {@link java.util.ListIterator#add(Object) add()}
+ * and {@link java.util.ListIterator#set(Object) set()}.
+ *
+ * @see java.util.ListIterator
+ * @see it.unimi.dsi.fastutil.BidirectionalIterator
+ */
+
+public interface KEY_LIST_ITERATOR KEY_GENERIC extends ListIterator<KEY_GENERIC_CLASS>, KEY_BIDI_ITERATOR KEY_GENERIC {
+
+#if #keys(primitive)
+	void set( KEY_TYPE k );
+	void add( KEY_TYPE k );
+#endif
+
+}
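
A minimal sketch of the primitive set()/add() methods, using the generated int specialization IntListIterator obtained from an IntArrayList (both assumed from the drivers in this commit); neither call boxes its argument.

import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.ints.IntList;
import it.unimi.dsi.fastutil.ints.IntListIterator;

public class ListIteratorExample {
	public static void main( String[] args ) {
		final IntList l = IntArrayList.wrap( new int[] { 1, 2, 3 } );
		final IntListIterator i = l.listIterator();

		i.nextInt(); // returns 1
		i.set( 10 ); // replaces the element just returned: 10, 2, 3
		i.add( 15 ); // inserts after it: 10, 15, 2, 3

		System.out.println( l ); // the list now contains 10, 15, 2, 3
	}
}
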
diff --git a/drv/Lists.drv b/drv/Lists.drv
new file mode 100644
index 0000000..2097338
--- /dev/null
+++ b/drv/Lists.drv
@@ -0,0 +1,840 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.List;
+import java.util.Collection;
+import java.util.Random;
+
+/** A class providing static methods and objects that do useful things with type-specific lists.
+ *
+ * @see java.util.Collections
+ */
+
+public class LISTS {
+
+	private LISTS() {}
+
+	/** Shuffles the specified list using the specified pseudorandom number generator.
+	 * 
+	 * @param l the list to be shuffled.
+	 * @param random a pseudorandom number generator (please use a <a href="http://dsiutils.dsi.unimi.it/docs/it/unimi/dsi/util/XorShiftStarRandom.html">XorShift*</a> generator).
+	 * @return <code>l</code>.
+	 */
+	public static KEY_GENERIC LIST KEY_GENERIC shuffle( final LIST KEY_GENERIC l, final Random random ) {
+		for( int i = l.size(); i-- != 0; ) {
+			final int p = random.nextInt( i + 1 );
+			final KEY_GENERIC_TYPE t = l.GET_KEY( i );
+			l.set( i, l.GET_KEY( p ) );
+			l.set( p, t );
+		}
+		return l;
+	}
+
+	/** An immutable class representing an empty type-specific list.
+	 *
+	 * <P>This class may be useful to implement your own in case you subclass
+	 * a type-specific list.
+	 */
+
+	public static class EmptyList KEY_GENERIC extends COLLECTIONS.EmptyCollection KEY_GENERIC implements LIST KEY_GENERIC, java.io.Serializable, Cloneable {
+		
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected EmptyList() {}
+
+		public void add( final int index, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); } 
+		public boolean add( final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+		public KEY_GENERIC_TYPE REMOVE_KEY( int i ) { throw new UnsupportedOperationException(); }
+		public KEY_GENERIC_TYPE set( final int index, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+
+		public int indexOf( KEY_TYPE k ) { return -1; }
+		public int lastIndexOf( KEY_TYPE k ) { return -1; }
+
+		public boolean addAll( Collection<? extends KEY_GENERIC_CLASS> c ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( int i, Collection<? extends KEY_GENERIC_CLASS> c ) { throw new UnsupportedOperationException(); }
+		public boolean removeAll( Collection<?> c ) { throw new UnsupportedOperationException(); }
+
+		public KEY_GENERIC_CLASS get( int i ) { throw new IndexOutOfBoundsException(); }
+
+#if #keys(primitive)
+		public boolean addAll( COLLECTION c ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( LIST c ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( int i, COLLECTION c ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( int i, LIST c ) { throw new UnsupportedOperationException(); }
+
+		public void add( final int index, final KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); } 
+		public boolean add( final KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); }
+		public KEY_GENERIC_CLASS set( final int index, final KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); }
+		public KEY_GENERIC_TYPE GET_KEY( int i ) { throw new IndexOutOfBoundsException(); }
+
+		public KEY_GENERIC_CLASS remove( int k ) { throw new UnsupportedOperationException(); }
+
+		public int indexOf( Object k ) { return -1; }
+		public int lastIndexOf( Object k ) { return -1; }
+#endif
+	
+		//@SuppressWarnings("unchecked")
+		//public KEY_ITERATOR KEY_GENERIC iterator( int i ) { if ( i == 0 ) return ITERATORS.EMPTY_ITERATOR; throw new IndexOutOfBoundsException( String.valueOf( i ) ); }
+
+		@Deprecated
+		@SuppressWarnings("unchecked")
+		public KEY_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() { return ITERATORS.EMPTY_ITERATOR; }
+
+		@SuppressWarnings("unchecked")
+		public KEY_LIST_ITERATOR KEY_GENERIC listIterator() { return ITERATORS.EMPTY_ITERATOR; }
+
+		@SuppressWarnings("unchecked")
+		public KEY_LIST_ITERATOR KEY_GENERIC iterator() { return ITERATORS.EMPTY_ITERATOR; }
+
+		@SuppressWarnings("unchecked")
+		public KEY_LIST_ITERATOR KEY_GENERIC listIterator( int i ) { if ( i == 0 ) return ITERATORS.EMPTY_ITERATOR; throw new IndexOutOfBoundsException( String.valueOf( i ) ); }
+
+		@Deprecated
+		public KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD() { return listIterator(); }
+
+		@Deprecated
+		public KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD( int i ) { return listIterator( i ); }
+
+		public LIST KEY_GENERIC subList( int from, int to ) { if ( from == 0 && to == 0 ) return this; throw new IndexOutOfBoundsException(); }
+
+		@Deprecated
+		public LIST KEY_GENERIC SUBLIST_METHOD( int from, int to ) { return subList( from, to ); }
+
+		public void getElements( int from, KEY_TYPE[] a, int offset, int length ) { if ( from == 0 && length == 0 && offset >= 0 && offset <= a.length ) return; throw new IndexOutOfBoundsException(); }
+		public void removeElements( int from, int to ) { throw new UnsupportedOperationException(); }
+
+		public void addElements( int index, final KEY_GENERIC_TYPE a[], int offset, int length ) { throw new UnsupportedOperationException(); }
+		public void addElements( int index, final KEY_GENERIC_TYPE a[] ) { throw new UnsupportedOperationException(); }
+
+		public void size( int s )  { throw new UnsupportedOperationException(); }
+
+		public int compareTo( final List<? extends KEY_GENERIC_CLASS> o ) {
+			if ( o == this ) return 0;
+			return ((List<?>)o).isEmpty() ? 0 : -1;
+		}
+
+		private Object readResolve() { return EMPTY_LIST; }
+		public Object clone() { return EMPTY_LIST; }
+	}
+
+	/** An empty list (immutable). It is serializable and cloneable. 
+	 *
+	 * <P>The class of this object represents an abstract empty list
+	 * that is a sublist of any type of list. Thus, {@link #EMPTY_LIST}
+	 * may be assigned to a variable of any type-specific list type.
+	 */
+
+	@SuppressWarnings("rawtypes")
+	public static final EmptyList EMPTY_LIST = new EmptyList();
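+
+	/* For illustration (assuming the generated int specialization, IntLists/IntList): the empty
+	 * list may be assigned to any type-specific list variable without a cast:
+	 *
+	 *   IntList empty = IntLists.EMPTY_LIST; // size() == 0, all mutators are unsupported
+	 */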
+
+
+
+	/** An immutable class representing a type-specific singleton list. 
+	 *
+	 * <P>This class may be useful as a starting point for your own implementation
+	 * in case you subclass a type-specific list.
+	 */
+
+	public static class Singleton KEY_GENERIC extends ABSTRACT_LIST KEY_GENERIC implements java.io.Serializable, Cloneable {
+	
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		private final KEY_GENERIC_TYPE element;
+	
+		private Singleton( final KEY_GENERIC_TYPE element ) {
+			this.element = element;
+		}
+	
+		public KEY_GENERIC_TYPE GET_KEY( final int i ) { if ( i == 0 ) return element; throw new IndexOutOfBoundsException(); }
+		public KEY_GENERIC_TYPE REMOVE_KEY( final int i ) { throw new UnsupportedOperationException(); }
+		public boolean contains( final KEY_TYPE k ) { return KEY_EQUALS( k, element ); }
+	
+		public boolean addAll( final Collection<? extends KEY_GENERIC_CLASS> c ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( final int i, final Collection <? extends KEY_GENERIC_CLASS> c ) { throw new UnsupportedOperationException(); }
+		public boolean removeAll( final Collection<?> c ) { throw new UnsupportedOperationException(); }
+		public boolean retainAll( final Collection<?> c ) { throw new UnsupportedOperationException(); }
+	
+		/* Slightly optimized w.r.t. the one in ABSTRACT_SET. */
+	
+		public KEY_TYPE[] TO_KEY_ARRAY() {
+			KEY_TYPE a[] = new KEY_TYPE[ 1 ];
+			a[ 0 ] = element;
+			return a;
+		}
+	
+		@SuppressWarnings("unchecked")
+		public KEY_LIST_ITERATOR KEY_GENERIC listIterator() { return ITERATORS.singleton( element ); }
+
+		public KEY_LIST_ITERATOR KEY_GENERIC iterator() { return listIterator(); }
+
+		public KEY_LIST_ITERATOR KEY_GENERIC listIterator( int i ) { 
+			if ( i > 1 || i < 0 ) throw new  IndexOutOfBoundsException();
+			KEY_LIST_ITERATOR KEY_GENERIC l = listIterator();
+			if ( i == 1 ) l.next();
+			return l;
+		}
+
+		@SuppressWarnings("unchecked")
+		public LIST KEY_GENERIC subList( final int from, final int to ) {
+			ensureIndex( from );
+			ensureIndex( to );
+			if ( from > to ) throw new IndexOutOfBoundsException( "Start index (" + from + ") is greater than end index (" + to + ")" );
+			
+			if ( from != 0 || to != 1 ) return EMPTY_LIST;
+			return this;
+		}
+		
+		public int size() { return 1; }
+		public void size( final int size ) { throw new UnsupportedOperationException(); }
+		public void clear() { throw new UnsupportedOperationException(); }
+	
+		public Object clone() { return this; }
+
+#if #keys(primitive)
+		public boolean rem( final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( final COLLECTION c ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( final int i, final COLLECTION c ) { throw new UnsupportedOperationException(); }
+#else
+		public boolean remove( final Object k ) { throw new UnsupportedOperationException(); }
+#endif
+
+	}
+
+	/** Returns a type-specific immutable list containing only the specified element. The returned list is serializable and cloneable.
+	 *
+	 * @param element the only element of the returned list.
+	 * @return a type-specific immutable list containing just <code>element</code>.
+	 */
+
+	public static KEY_GENERIC LIST KEY_GENERIC singleton( final KEY_GENERIC_TYPE element ) { return new Singleton KEY_GENERIC( element ); }
+
+#if ! #keys(reference)
+
+	/** Returns a type-specific immutable list containing only the specified element. The returned list is serializable and cloneable.
+	 *
+	 * @param element the only element of the returned list.
+	 * @return a type-specific immutable list containing just <code>element</code>.
+	 */
+
+	public static KEY_GENERIC LIST KEY_GENERIC singleton( final Object element ) { return new Singleton KEY_GENERIC( KEY_OBJ2TYPE( element ) ); }
+
+#endif
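+
+	/* A minimal usage sketch (assuming the generated int specialization, IntLists/IntList):
+	 *
+	 *   IntList one = IntLists.singleton( 3 ); // a one-element immutable list [3]
+	 *   one.getInt( 0 );                       // 3; mutators throw UnsupportedOperationException
+	 */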
+
+
+	/** A synchronized wrapper class for lists. */
+
+	public static class SynchronizedList KEY_GENERIC extends COLLECTIONS.SynchronizedCollection KEY_GENERIC implements LIST KEY_GENERIC, java.io.Serializable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected final LIST KEY_GENERIC list; // Due to the large number of methods that are not in COLLECTION, this is worth caching.
+
+		protected SynchronizedList( final LIST KEY_GENERIC l, final Object sync ) {
+			super( l, sync );
+			this.list = l;
+		}
+
+		protected SynchronizedList( final LIST KEY_GENERIC l ) {
+			super( l );
+			this.list = l;
+		}
+
+		public KEY_GENERIC_TYPE GET_KEY( final int i ) { synchronized( sync ) { return list.GET_KEY( i ); } }
+		public KEY_GENERIC_TYPE set( final int i, final KEY_GENERIC_TYPE k ) { synchronized( sync ) { return list.set( i, k ); } }
+		public void add( final int i, final KEY_GENERIC_TYPE k ) { synchronized( sync ) { list.add( i, k ); } }
+		public KEY_GENERIC_TYPE REMOVE_KEY( final int i ) { synchronized( sync ) { return list.REMOVE_KEY( i ); } }
+
+		public int indexOf( final KEY_TYPE k ) { synchronized( sync ) { return list.indexOf( k ); } }
+		public int lastIndexOf( final KEY_TYPE k ) { synchronized( sync ) { return list.lastIndexOf( k ); } }
+
+		public boolean addAll( final int index, final Collection<? extends KEY_GENERIC_CLASS> c ) { synchronized( sync ) { return list.addAll( index, c ); } }
+
+		public void getElements( final int from, final KEY_TYPE a[], final int offset, final int length ) { synchronized( sync ) { list.getElements( from, a, offset, length ); } }
+		public void removeElements( final int from, final int to ) { synchronized( sync ) { list.removeElements( from, to ); } }
+		public void addElements( int index, final KEY_GENERIC_TYPE a[], int offset, int length ) { synchronized( sync ) { list.addElements( index, a, offset, length ); } }
+		public void addElements( int index, final KEY_GENERIC_TYPE a[] ) { synchronized( sync ) { list.addElements( index, a ); } }
+		public void size( final int size ) { synchronized( sync ) { list.size( size ); } }
+
+		public KEY_LIST_ITERATOR KEY_GENERIC iterator() { return list.listIterator(); }
+		public KEY_LIST_ITERATOR KEY_GENERIC listIterator() { return list.listIterator(); }
+		public KEY_LIST_ITERATOR KEY_GENERIC listIterator( final int i ) { return list.listIterator( i ); }
+
+		@Deprecated
+		public KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD() { return listIterator(); }
+
+		@Deprecated
+		public KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD( final int i ) { return listIterator( i ); }
+
+		public LIST KEY_GENERIC subList( final int from, final int to ) { synchronized( sync ) { return synchronize( list.subList( from, to ), sync ); } }
+
+		@Deprecated
+		public LIST KEY_GENERIC SUBLIST_METHOD( final int from, final int to ) { return subList( from, to ); }
+
+		public boolean equals( final Object o ) { synchronized( sync ) { return collection.equals( o ); } }
+		public int hashCode() { synchronized( sync ) { return collection.hashCode(); } }
+
+#if ! #keyclass(Reference)
+		public int compareTo( final List<? extends KEY_GENERIC_CLASS> o ) { synchronized( sync ) { return list.compareTo( o ); } }
+#endif
+
+#if #keys(primitive)
+		public boolean addAll( final int index, final COLLECTION c ) { synchronized( sync ) { return list.addAll( index, c ); } }
+		public boolean addAll( final int index, LIST l ) { synchronized( sync ) { return list.addAll( index, l ); } }
+		public boolean addAll( LIST l ) { synchronized( sync ) { return list.addAll( l ); } }
+
+		public KEY_GENERIC_CLASS get( final int i ) { synchronized( sync ) { return list.get( i ); } }
+		public void add( final int i, KEY_GENERIC_CLASS k ) { synchronized( sync ) { list.add( i, k ); } }
+		public KEY_GENERIC_CLASS set( final int index, KEY_GENERIC_CLASS k ) { synchronized( sync ) { return list.set( index, k ); } }
+		public KEY_GENERIC_CLASS remove( final int i ) { synchronized( sync ) { return list.remove( i ); } }
+		public int indexOf( final Object o ) { synchronized( sync ) { return list.indexOf( o ); } }
+		public int lastIndexOf( final Object o ) { synchronized( sync ) { return list.lastIndexOf( o ); } }
+#endif
+	}
+
+
+	/** Returns a synchronized type-specific list backed by the given type-specific list.
+	 *
+	 * @param l the list to be wrapped in a synchronized list.
+	 * @return a synchronized view of the specified list.
+	 * @see java.util.Collections#synchronizedList(List)
+	 */
+	public static KEY_GENERIC LIST KEY_GENERIC synchronize( final LIST KEY_GENERIC l ) { return new SynchronizedList KEY_GENERIC( l ); }
+
+	/** Returns a synchronized type-specific list backed by the given type-specific list, using an assigned object to synchronize.
+	 *
+	 * @param l the list to be wrapped in a synchronized list.
+	 * @param sync an object that will be used to synchronize the access to the list.
+	 * @return a synchronized view of the specified list.
+	 * @see java.util.Collections#synchronizedList(List)
+	 */
+
+	public static KEY_GENERIC LIST KEY_GENERIC synchronize( final LIST KEY_GENERIC l, final Object sync ) { return new SynchronizedList KEY_GENERIC( l, sync ); }
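+
+	/* A minimal usage sketch (assuming the generated int specialization, IntLists/IntArrayList/IntListIterator):
+	 *
+	 *   Object lock = new Object();
+	 *   IntList shared = IntLists.synchronize( new IntArrayList(), lock );
+	 *   shared.add( 1 ); // individual calls are synchronized on the supplied lock
+	 *   // Iterators are not synchronized (see iterator() above), so iteration needs external locking:
+	 *   synchronized( lock ) {
+	 *       for( IntListIterator i = shared.iterator(); i.hasNext(); ) i.nextInt();
+	 *   }
+	 */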
+
+
+
+	/** An unmodifiable wrapper class for lists. */
+
+	public static class UnmodifiableList KEY_GENERIC extends COLLECTIONS.UnmodifiableCollection KEY_GENERIC implements LIST KEY_GENERIC, java.io.Serializable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected final LIST KEY_GENERIC list; // Due to the large number of methods that are not in COLLECTION, this is worth caching.
+
+		protected UnmodifiableList( final LIST KEY_GENERIC l ) {
+			super( l );
+			this.list = l;
+		}
+
+		public KEY_GENERIC_TYPE GET_KEY( final int i ) { return list.GET_KEY( i ); }
+		public KEY_GENERIC_TYPE set( final int i, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+		public void add( final int i, final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+		public KEY_GENERIC_TYPE REMOVE_KEY( final int i ) { throw new UnsupportedOperationException(); }
+
+		public int indexOf( final KEY_TYPE k ) { return list.indexOf( k ); }
+		public int lastIndexOf( final KEY_TYPE k ) { return list.lastIndexOf( k ); }
+
+		public boolean addAll( final int index, final Collection<? extends KEY_GENERIC_CLASS> c ) { throw new UnsupportedOperationException(); }
+
+		public void getElements( final int from, final KEY_TYPE a[], final int offset, final int length ) { list.getElements( from, a, offset, length ); }
+		public void removeElements( final int from, final int to ) { throw new UnsupportedOperationException(); }
+		public void addElements( int index, final KEY_GENERIC_TYPE a[], int offset, int length ) { throw new UnsupportedOperationException(); }
+		public void addElements( int index, final KEY_GENERIC_TYPE a[] ) { throw new UnsupportedOperationException(); }
+		public void size( final int size ) { list.size( size ); }
+
+		public KEY_LIST_ITERATOR KEY_GENERIC iterator() { return listIterator(); }
+		public KEY_LIST_ITERATOR KEY_GENERIC listIterator() { return ITERATORS.unmodifiable( list.listIterator() ); }
+		public KEY_LIST_ITERATOR KEY_GENERIC listIterator( final int i ) { return ITERATORS.unmodifiable( list.listIterator( i ) ); }
+
+		@Deprecated
+		public KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD() { return listIterator(); }
+
+		@Deprecated
+		public KEY_LIST_ITERATOR KEY_GENERIC KEY_LIST_ITERATOR_METHOD( final int i ) { return listIterator( i ); }
+
+		public LIST KEY_GENERIC subList( final int from, final int to ) { return unmodifiable( list.subList( from, to ) ); }
+
+		@Deprecated
+		public LIST KEY_GENERIC SUBLIST_METHOD( final int from, final int to ) { return subList( from, to ); }
+
+		public boolean equals( final Object o ) { return collection.equals( o ); }
+		public int hashCode() { return collection.hashCode(); }
+
+#if ! #keyclass(Reference)
+		public int compareTo( final List<? extends KEY_GENERIC_CLASS> o ) { return list.compareTo( o ); }
+#endif
+
+#if #keys(primitive)
+		public boolean addAll( final int index, final COLLECTION c ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( final LIST l ) { throw new UnsupportedOperationException(); }
+		public boolean addAll( final int index, final LIST l ) { throw new UnsupportedOperationException(); }
+		public KEY_GENERIC_CLASS get( final int i ) { return list.get( i ); }
+		public void add( final int i, KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); }
+		public KEY_GENERIC_CLASS set( final int index, KEY_GENERIC_CLASS k ) { throw new UnsupportedOperationException(); }
+		public KEY_GENERIC_CLASS remove( final int i ) { throw new UnsupportedOperationException(); }
+		public int indexOf( final Object o ) { return list.indexOf( o ); }
+		public int lastIndexOf( final Object o ) { return list.lastIndexOf( o ); }
+#endif
+	}
+
+
+	/** Returns an unmodifiable type-specific list backed by the given type-specific list.
+	 *
+	 * @param l the list to be wrapped in an unmodifiable list.
+	 * @return an unmodifiable view of the specified list.
+	 * @see java.util.Collections#unmodifiableList(List)
+	 */
+	public static KEY_GENERIC LIST KEY_GENERIC unmodifiable( final LIST KEY_GENERIC l ) { return new UnmodifiableList KEY_GENERIC( l ); }
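+
+	/* A minimal usage sketch (assuming the generated int specialization, IntLists/IntArrayList):
+	 *
+	 *   IntList view = IntLists.unmodifiable( new IntArrayList( new int[] { 1, 2 } ) );
+	 *   view.getInt( 0 ); // 1; reads are delegated to the backing list
+	 *   // view.add( 3 ); // would throw UnsupportedOperationException
+	 */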
+
+
+
+
+#ifdef TEST
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte ) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif #keyclass(Object)
+		return Integer.toBinaryString( r.nextInt() );
+#else
+		return new java.io.Serializable() {};
+#endif
+	}
+
+
+	private static void testLists( KEY_TYPE k, LIST m, List t, int level ) {
+		int n = 100;
+		int c;
+
+		long ms;
+		boolean mThrowsIllegal, tThrowsIllegal, mThrowsNoElement, tThrowsNoElement, mThrowsIndex, tThrowsIndex, mThrowsUnsupp, tThrowsUnsupp;
+		boolean rt = false, rm = false;
+		Object Rt = null, Rm = null;
+
+		if ( level == 0 ) return;
+
+		/* Now we check that m and t are equal. */
+		if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t);
+
+		ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" );
+		ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" );
+
+		/* Now we check that m actually holds that data. */
+		for(java.util.Iterator i=t.iterator(); i.hasNext();  ) {
+			ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on t)" );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for(java.util.Iterator i=m.listIterator(); i.hasNext();  ) {
+			ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on m)" );
+		}
+
+		/* Now we check that inquiries about random data give the same answer in m and t. For
+		   m we use the polymorphic method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+				
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				m.contains(T);
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+
+			try {
+				t.contains(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): contains() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): contains() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): contains() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex) ensure( m.contains(KEY2OBJ(T)) == t.contains(KEY2OBJ(T)), "Error (" + level + ", " + seed + "): divergence in keys between t and m (polymorphic method) " + m );
+		}
+
+		/* Again, we check that inquiries about random data give the same answer in m and t, but
+		   for m we use the standard method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				m.contains(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				t.contains(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): contains() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): contains() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): contains() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): contains() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( m.contains(KEY2OBJ(T)) ==  t.contains(KEY2OBJ(T)), "Error (" + level + ", " + seed + "): divergence between t and m (standard method) " + m );
+		}
+
+		/* Now we add and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				rm = m.add(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.add(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): add() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): add() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): add() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): add() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( rm == rt, "Error (" + level + ", " + seed + "): divergence in add() between t and m " + m );
+
+			T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				rm = m.remove(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.remove(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			if ( ! KEY_EQUALS( T, k ) && mThrowsUnsupp && ! tThrowsUnsupp ) mThrowsUnsupp = false; // Compensate for a known bug in Collections.singleton(): mask the spurious divergence.
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): remove() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): remove() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): remove() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): remove() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( rm == rt, "Error (" + level + ", " + seed + "): divergence in remove() between t and m " + m );
+		}
+
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after removal " + m );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after removal " + m );
+
+		/* Now we add and remove random data in m and t at specific positions, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			int pos = r.nextInt( 2 );
+
+			try {
+				m.add(pos, KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				t.add(pos, KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): add() at " + pos + " divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): add() at " + pos + " divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): add() at " + pos + " divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): add() at " + pos + " divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+
+			T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+			
+			pos = r.nextInt( 2 );
+
+			try {
+				Rm = m.remove(pos);
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				Rt = t.remove(pos);
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): remove() at " + pos + " divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): remove() at " + pos + " divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): remove() at " + pos + " divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): remove() at " + pos + " divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( Rm == Rt || Rm != null && Rm.equals(Rt), "Error (" + level + ", " + seed + "): divergence in remove() at " + pos + " between t and m " + m );
+		}
+
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after removal " + m );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after removal " + m );
+
+		/* Now we add and remove random collections in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				rm = m.addAll(java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.addAll(java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): addAll() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): addAll() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): addAll() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): addAll() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( rm == rt, "Error (" + level + ", " + seed + "): divergence in addAll() between t and m " + m );
+
+			T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				rm = m.removeAll(java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.removeAll(java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): removeAll() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): removeAll() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): removeAll() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): removeAll() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( rm == rt, "Error (" + level + ", " + seed + "): divergence in removeAll() between t and m " + m );
+		}
+
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after set removal " + m );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after set removal " + m );
+
+		/* Now we add random collections at specific positions in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			int pos = r.nextInt( 2 );
+
+			try {
+				rm = m.addAll(pos, java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.addAll(pos, java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): addAll() at " + pos + " divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): addAll() at " + pos + " divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): addAll() at " + pos + " divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): addAll() at " + pos + " divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( rm == rt, "Error (" + level + ", " + seed + "): divergence in addAll() at " + pos + " between t and m " + m );
+
+		}
+
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after set removal " + m );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after set removal " + m );
+
+		/* Now we check that m actually holds the same data. */
+		  
+		for(java.util.Iterator i=t.iterator(); i.hasNext();  ) {
+			ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after removal (iterating on t)");
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.listIterator(); i.hasNext();  ) {
+			ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after removal (iterating on m)" );
+		}
+		
+		if ( m instanceof Singleton ) {
+			ensure( m.equals( ((Singleton)m).clone() ), "Error (" + level + ", " + seed + "): m does not equal m.clone()" );
+			ensure( ((Singleton)m).clone().equals( m ), "Error (" + level + ", " + seed + "): m.clone() does not equal m" );
+		}
+
+		int h = m.hashCode();
+
+		/* Now we save and read m. */
+
+		LIST m2 = null;
+		  
+		try {
+			java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test");
+			java.io.OutputStream os = new java.io.FileOutputStream(ff);
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os);
+				
+			oos.writeObject(m);
+			oos.close();
+				
+			java.io.InputStream is = new java.io.FileInputStream(ff);
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is);
+				
+			m2 = (LIST)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch(Exception e) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+
+#if ! #keyclass(Reference)
+
+		ensure( m2.hashCode() == h, "Error (" + level + ", " + seed + "): hashCode() changed after save/read" );
+		  
+		/* Now we check that m2 actually holds that data. */
+		  
+		ensure( m2.equals(t), "Error (" + level + ", " + seed + "): ! m2.equals( t ) after save/read" );
+		ensure( t.equals(m2), "Error (" + level + ", " + seed + "): ! t.equals( m2 ) after save/read" );
+#endif
+
+		if ( ! m.isEmpty() ) {
+			int start = r.nextInt( m.size() );
+			int end = start + r.nextInt( m.size() - start );
+			//System.err.println("Checking subList from " + start + " to " + end + " (level=" + (level+1) + ")..." );
+			testLists( k, m.subList( start, end ), t.subList( start, end ), level - 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after subList " + m + " " + t );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after subList" );
+
+		}
+
+		return;
+	}
+
+	private static void test() {
+		KEY_TYPE k = genKey();
+		LIST m = new Singleton( k );
+		List u = java.util.Collections.singletonList( KEY2OBJ( k ) );
+		testLists( k, m, java.util.Collections.unmodifiableList( u ), 3 );
+		System.out.println("Test OK");
+	}
+	
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition fp = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, fp ).toString();
+	}
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	/** This method expects a lower-cased type (e.g., "int") as its first argument
+	 * and, optionally, a seed as its second argument. */
+
+	public static void main( String arg[] ) throws Exception {
+		if ( arg.length > 1 ) r = new java.util.Random( seed = Long.parseLong( arg[ 1 ] ) );
+		  
+		try {
+			test();
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+	
+#endif
+	
+}
diff --git a/drv/Map.drv b/drv/Map.drv
new file mode 100644
index 0000000..1275b91
--- /dev/null
+++ b/drv/Map.drv
@@ -0,0 +1,138 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import VALUE_PACKAGE.VALUE_COLLECTION;
+
+import it.unimi.dsi.fastutil.objects.ObjectSet;
+import it.unimi.dsi.fastutil.objects.ObjectIterator;
+
+import java.util.Map;
+
+/** A type-specific {@link Map}; provides some additional methods that use polymorphism to avoid (un)boxing, as well as the handling of a default return value.
+ *
+ * <P>Besides extending the corresponding type-specific {@linkplain it.unimi.dsi.fastutil.Function function}, this interface strengthens {@link #entrySet()},
+ * {@link #keySet()} and {@link #values()}. Maps returning entry sets of type {@link FastEntrySet} also support fast iteration.
+ *
+ * <P>A submap or subset may or may not have an
+ * independent default return value (which, however, must be initialized to the
+ * default return value of the originator).
+ *
+ * @see Map
+ */
+
+public interface MAP KEY_VALUE_GENERIC extends FUNCTION KEY_VALUE_GENERIC, Map<KEY_GENERIC_CLASS,VALUE_GENERIC_CLASS> {
+
+	/** An entry set providing fast iteration. 
+	 *
+	 * <p>In some cases (e.g., hash-based classes) iteration over an entry set requires the creation
+	 * of a large number of {@link java.util.Map.Entry} objects. Some <code>fastutil</code>
+	 * maps might return {@linkplain #entrySet() entry set} objects of type <code>FastEntrySet</code>: in this case, {@link #fastIterator() fastIterator()}
+	 * will return an iterator that is guaranteed not to create a large number of objects, <em>possibly
+	 * by always returning the same entry</em> (suitably mutated).
+	 */
+
+	public interface FastEntrySet KEY_VALUE_GENERIC extends ObjectSet<MAP.Entry KEY_VALUE_GENERIC> {
+		/** Returns a fast iterator over this entry set; the iterator might always return the same entry object, suitably mutated.
+		 *
+		 * @return a fast iterator over this entry set; the iterator might always return the same {@link java.util.Map.Entry} object, suitably mutated.
+		 */
+		public ObjectIterator<MAP.Entry KEY_VALUE_GENERIC> fastIterator();
+	}
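+
+	/* A minimal usage sketch of fast iteration (assuming the generated int-to-int specialization,
+	 * Int2IntMap and Int2IntOpenHashMap, whose type-specific entry set method is int2IntEntrySet()):
+	 *
+	 *   Int2IntMap m = new Int2IntOpenHashMap();
+	 *   ObjectSet<Int2IntMap.Entry> es = m.int2IntEntrySet();
+	 *   ObjectIterator<Int2IntMap.Entry> i = es instanceof Int2IntMap.FastEntrySet
+	 *       ? ((Int2IntMap.FastEntrySet)es).fastIterator() // may reuse a single Entry instance
+	 *       : es.iterator();
+	 *   while( i.hasNext() ) i.next().getIntKey();
+	 */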
+
+	/** Returns a set view of the mappings contained in this map.
+	 *  <P>Note that this specification strengthens the one given in {@link Map#entrySet()}.
+	 *
+	 * @return a set view of the mappings contained in this map.
+	 * @see Map#entrySet()
+	 */
+
+	ObjectSet<Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>> entrySet();
+
+	/** Returns a type-specific set view of the mappings contained in this map.
+	 *
+	 * <p>This method is necessary because there is no inheritance along
+	 * type parameters: it is thus impossible to strengthen {@link #entrySet()}
+	 * so that it returns an {@link it.unimi.dsi.fastutil.objects.ObjectSet}
+	 * of objects of type {@link java.util.Map.Entry} (the latter makes it possible to
+	 * access keys and values with type-specific methods).
+	 *
+	 * @return a type-specific set view of the mappings contained in this map.
+	 * @see #entrySet()
+	 */
+
+	ObjectSet<MAP.Entry KEY_VALUE_GENERIC> ENTRYSET();
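+
+	/* For illustration (assuming the generated int-to-int specialization, and given some Int2IntMap m
+	 * and an int sum): both views expose the same mappings, but only the type-specific one avoids boxing:
+	 *
+	 *   for( Int2IntMap.Entry e : m.int2IntEntrySet() ) sum += e.getIntValue(); // unboxed access
+	 *   for( Map.Entry<Integer, Integer> e : m.entrySet() ) sum += e.getValue(); // boxes keys and values
+	 */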
+
+	/** Returns a set view of the keys contained in this map.
+	 *  <P>Note that this specification strengthens the one given in {@link Map#keySet()}.
+	 *
+	 * @return a set view of the keys contained in this map.
+	 * @see Map#keySet()
+	 */
+
+	SET KEY_GENERIC keySet();
+
+	/** Returns a type-specific collection view of the values contained in this map.
+	 *  <P>Note that this specification strengthens the one given in {@link Map#values()}.
+	 *
+	 * @return a type-specific collection view of the values contained in this map.
+	 * @see Map#values()
+	 */
+
+	VALUE_COLLECTION VALUE_GENERIC values();
+
+
+#if #values(primitive)
+
+	/**
+	 * @see Map#containsValue(Object)
+	 */
+
+	boolean containsValue( VALUE_TYPE value );
+
+#endif
+
+	/** A type-specific {@link java.util.Map.Entry}; provides some additional methods
+	 *  that use polymorphism to avoid (un)boxing.
+	 *
+	 * @see java.util.Map.Entry
+	 */
+
+	interface Entry KEY_VALUE_GENERIC extends Map.Entry <KEY_GENERIC_CLASS,VALUE_GENERIC_CLASS> {
+		  
+#if #keys(primitive)
+		/**
+		 * @see java.util.Map.Entry#getKey()
+		 */
+		KEY_TYPE ENTRY_GET_KEY();
+#endif
+
+#if #values(primitive)
+		/**
+		 * @see java.util.Map.Entry#setValue(Object)
+		 */
+		VALUE_TYPE setValue(VALUE_TYPE value);
+
+		/**
+		 * @see java.util.Map.Entry#getValue()
+		 */
+		VALUE_TYPE ENTRY_GET_VALUE();
+#endif
+
+	}
+}
diff --git a/drv/Maps.drv b/drv/Maps.drv
new file mode 100644
index 0000000..a32f93c
--- /dev/null
+++ b/drv/Maps.drv
@@ -0,0 +1,358 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.objects.ObjectSet;
+import it.unimi.dsi.fastutil.objects.ObjectSets;
+
+import VALUE_PACKAGE.VALUE_COLLECTION;
+import VALUE_PACKAGE.VALUE_COLLECTIONS;
+#if ! #valueclass(Object)
+import VALUE_PACKAGE.VALUE_SETS;
+#endif
+
+import java.util.Map;
+
+/** A class providing static methods and objects that do useful things with type-specific maps.
+ *
+ * @see it.unimi.dsi.fastutil.Maps
+ * @see java.util.Collections
+ */
+
+public class MAPS {
+
+	private MAPS() {}
+
+
+	/** An immutable class representing an empty type-specific map.
+	 *
+	 * <P>This class may be useful as a starting point for your own implementation
+	 * in case you subclass a type-specific map.
+	 */
+
+	public static class EmptyMap KEY_VALUE_GENERIC extends FUNCTIONS.EmptyFunction KEY_VALUE_GENERIC implements MAP KEY_VALUE_GENERIC, java.io.Serializable, Cloneable {
+	
+		private static final long serialVersionUID = -7046029254386353129L;
+	
+		protected EmptyMap() {}
+	
+		public boolean containsValue( final VALUE_TYPE v ) { return false; }
+
+		public void putAll( final Map<? extends KEY_GENERIC_CLASS, ? extends VALUE_GENERIC_CLASS> m ) { throw new UnsupportedOperationException(); }
+
+		@SuppressWarnings("unchecked")
+		public ObjectSet<MAP.Entry KEY_VALUE_GENERIC> ENTRYSET() { return ObjectSets.EMPTY_SET; }
+
+		@SuppressWarnings("unchecked")
+		public SET KEY_GENERIC keySet() { return SETS.EMPTY_SET; }
+
+		@SuppressWarnings("unchecked")
+		public VALUE_COLLECTION VALUE_GENERIC values() { return VALUE_SETS.EMPTY_SET; }
+
+#if #values(primitive)
+		public boolean containsValue( final Object ov ) { return false; }
+#endif
+
+		private Object readResolve() { return EMPTY_MAP; }
+
+		public Object clone() { return EMPTY_MAP; }
+
+		public boolean isEmpty() { return true; }
+
+		@SuppressWarnings({ "rawtypes", "unchecked" })
+		public ObjectSet<Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>> entrySet() { return (ObjectSet)ENTRYSET(); }
+
+		public int hashCode() { return 0; }
+
+		public boolean equals( final Object o ) {
+			if ( ! ( o instanceof Map ) ) return false;
+
+			return ((Map<?,?>)o).isEmpty();
+		}	
+		
+		public String toString() { return "{}"; }
+	}
+
+
+
+	/** An empty type-specific map (immutable). It is serializable and cloneable. */
+	 
+	@SuppressWarnings("rawtypes")
+	public static final EmptyMap EMPTY_MAP = new EmptyMap();
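+
+	/* For illustration (assuming the generated int-to-int specialization, Int2IntMaps/Int2IntMap):
+	 *
+	 *   Int2IntMap none = Int2IntMaps.EMPTY_MAP; // immutable, size() == 0
+	 */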
+
+
+	/** An immutable class representing a type-specific singleton map.	 
+	 *
+	 * <P>This class may be useful as a starting point for your own implementation
+	 * in case you subclass a type-specific map.
+	 */
+
+	public static class Singleton KEY_VALUE_GENERIC extends FUNCTIONS.Singleton KEY_VALUE_GENERIC implements MAP KEY_VALUE_GENERIC, java.io.Serializable, Cloneable {
+	
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected transient volatile ObjectSet<MAP.Entry KEY_VALUE_GENERIC> entries;
+		protected transient volatile SET KEY_GENERIC keys;
+		protected transient volatile VALUE_COLLECTION VALUE_GENERIC values;
+
+		protected Singleton( final KEY_GENERIC_TYPE key, final VALUE_GENERIC_TYPE value ) {
+			super( key, value );
+		}
+	
+		public boolean containsValue( final VALUE_TYPE v ) { return VALUE_EQUALS( value, v ); }
+#if #values(primitive)
+		public boolean containsValue( final Object ov ) { return VALUE_EQUALS( VALUE_OBJ2TYPE( ov ), value ); }
+#endif
+
+		public void putAll( final Map<? extends KEY_GENERIC_CLASS, ? extends VALUE_GENERIC_CLASS> m ) { throw new UnsupportedOperationException(); }
+
+		public ObjectSet<MAP.Entry KEY_VALUE_GENERIC> ENTRYSET() { if ( entries == null ) entries = ObjectSets.singleton( (MAP.Entry KEY_VALUE_GENERIC)new SingletonEntry() ); return entries; }
+		public SET KEY_GENERIC keySet() { if ( keys == null ) keys = SETS.singleton( key ); return keys; }
+		public VALUE_COLLECTION VALUE_GENERIC values() { if ( values == null ) values = VALUE_SETS.singleton( value ); return values; }
+
+		protected class SingletonEntry implements MAP.Entry KEY_VALUE_GENERIC, Map.Entry<KEY_GENERIC_CLASS,VALUE_GENERIC_CLASS> {
+			public KEY_GENERIC_CLASS getKey() { return KEY2OBJ( Singleton.this.key ); }
+			public VALUE_GENERIC_CLASS getValue() { return VALUE2OBJ( Singleton.this.value ); }
+
+#if #keys(primitive)
+			public KEY_GENERIC_TYPE ENTRY_GET_KEY() { return Singleton.this.key; }
+#endif
+
+#if #values(primitive)
+			public VALUE_GENERIC_TYPE ENTRY_GET_VALUE() { return Singleton.this.value; }
+			public VALUE_GENERIC_TYPE setValue( final VALUE_GENERIC_TYPE value ) { throw new UnsupportedOperationException(); }
+#endif
+
+			public VALUE_GENERIC_CLASS setValue( final VALUE_GENERIC_CLASS value ) { throw new UnsupportedOperationException(); }
+			
+			public boolean equals( final Object o ) {
+				if (!(o instanceof Map.Entry)) return false;
+				Map.Entry<?,?> e = (Map.Entry<?,?>)o;
+			
+				return KEY_EQUALS( Singleton.this.key, KEY_OBJ2TYPE( e.getKey() ) ) && VALUE_EQUALS( Singleton.this.value, VALUE_OBJ2TYPE( e.getValue() ) );
+			}
+			
+			public int hashCode() { return KEY2JAVAHASH( Singleton.this.key ) ^ VALUE2JAVAHASH( Singleton.this.value ); }
+			public String toString() { return Singleton.this.key + "->" + Singleton.this.value; }
+		}
+
+		public boolean isEmpty() { return false; }
+
+		@SuppressWarnings({ "rawtypes", "unchecked" })
+		public ObjectSet<Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>> entrySet() { return (ObjectSet)ENTRYSET(); }
+
+		public int hashCode() { return KEY2JAVAHASH( key ) ^ VALUE2JAVAHASH( value ); }
+
+		public boolean equals( final Object o ) {
+			if ( o == this ) return true;
+			if ( ! ( o instanceof Map ) ) return false;
+
+			Map<?,?> m = (Map<?,?>)o; 
+			if ( m.size() != 1 ) return false; 
+			return entrySet().iterator().next().equals( m.entrySet().iterator().next() ); 
+		}	
+		
+		public String toString() { return "{" + key + "=>" + value + "}"; }
+	}
+
+	/** Returns a type-specific immutable map containing only the specified pair. The returned map is serializable and cloneable.
+	 *
+	 * <P>Note that although the returned map is immutable, its default return value may be changed.
+	 *
+	 * @param key the only key of the returned map.
+	 * @param value the only value of the returned map.
+	 * @return a type-specific immutable map containing just the pair <code>&lt;key,value&gt;</code>.
+	 */
+
+	public static KEY_VALUE_GENERIC MAP KEY_VALUE_GENERIC singleton( final KEY_GENERIC_TYPE key, VALUE_GENERIC_TYPE value ) {
+		return new Singleton KEY_VALUE_GENERIC( key, value );
+	}
+
+#if #keys(primitive) || #values(primitive)
+
+	/** Returns a type-specific immutable map containing only the specified pair. The returned map is serializable and cloneable.
+	 *
+	 * <P>Note that although the returned map is immutable, its default return value may be changed.
+	 *
+	 * @param key the only key of the returned map.
+	 * @param value the only value of the returned map.
+	 * @return a type-specific immutable map containing just the pair <code>&lt;key,value&gt;</code>.
+	 */
+
+	public static KEY_VALUE_GENERIC MAP KEY_VALUE_GENERIC singleton( final KEY_GENERIC_CLASS key, final VALUE_GENERIC_CLASS value ) {
+		return new Singleton KEY_VALUE_GENERIC( KEY_CLASS2TYPE( key ), VALUE_CLASS2TYPE( value ) );
+	}
+
+#endif
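+
+	/* A minimal usage sketch (assuming the generated int-to-int specialization, Int2IntMaps/Int2IntMap):
+	 *
+	 *   Int2IntMap m = Int2IntMaps.singleton( 1, 2 );
+	 *   m.get( 1 );                 // 2
+	 *   m.defaultReturnValue( -1 ); // allowed even though the mapping itself is immutable
+	 *   m.get( 3 );                 // -1
+	 */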
+
+
+	/** A synchronized wrapper class for maps. */
+
+	public static class SynchronizedMap KEY_VALUE_GENERIC extends FUNCTIONS.SynchronizedFunction KEY_VALUE_GENERIC implements MAP KEY_VALUE_GENERIC, java.io.Serializable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected final MAP KEY_VALUE_GENERIC map;
+
+		protected transient volatile ObjectSet<MAP.Entry KEY_VALUE_GENERIC> entries;
+		protected transient volatile SET KEY_GENERIC keys;
+		protected transient volatile VALUE_COLLECTION VALUE_GENERIC values;
+
+		protected SynchronizedMap( final MAP KEY_VALUE_GENERIC m, final Object sync ) {
+			super( m, sync );
+			this.map = m;
+		}
+
+		protected SynchronizedMap( final MAP KEY_VALUE_GENERIC m ) {
+			super( m );
+			this.map = m;
+		}
+
+		public int size() { synchronized( sync ) { return map.size(); } }
+		public boolean containsKey( final KEY_TYPE k ) { synchronized( sync ) { return map.containsKey( k ); } }
+		public boolean containsValue( final VALUE_TYPE v ) { synchronized( sync ) { return map.containsValue( v ); } }
+
+		public VALUE_GENERIC_TYPE defaultReturnValue()  { synchronized( sync ) { return map.defaultReturnValue(); } }
+		public void defaultReturnValue( final VALUE_GENERIC_TYPE defRetValue )  { synchronized( sync ) { map.defaultReturnValue( defRetValue ); } }
+
+		public VALUE_GENERIC_TYPE put( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) { synchronized( sync ) { return map.put( k, v ); } }
+
+		//public void putAll( final MAP KEY_VALUE_EXTENDS_GENERIC c ) { synchronized( sync ) { map.putAll( c ); } }
+		public void putAll( final Map<? extends KEY_GENERIC_CLASS, ? extends VALUE_GENERIC_CLASS> m ) { synchronized( sync ) { map.putAll( m ); } }
+
+		public ObjectSet<MAP.Entry KEY_VALUE_GENERIC> ENTRYSET() { if ( entries == null ) entries = ObjectSets.synchronize( map.ENTRYSET(), sync ); return entries; }
+		public SET KEY_GENERIC keySet() { if ( keys == null ) keys = SETS.synchronize( map.keySet(), sync ); return keys; }
+		public VALUE_COLLECTION VALUE_GENERIC values() { if ( values == null ) values = VALUE_COLLECTIONS.synchronize( map.values(), sync ); return values; }
+
+		public void clear() { synchronized( sync ) { map.clear(); } }
+		public String toString() { synchronized( sync ) { return map.toString(); } }
+
+#if #keys(primitive) || #values(primitive)
+		public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS k, final VALUE_GENERIC_CLASS v ) { synchronized( sync ) { return map.put( k, v ); } }
+#endif
+
+#if #keys(primitive)
+		public VALUE_GENERIC_TYPE remove( final KEY_GENERIC_TYPE k ) { synchronized( sync ) { return map.remove( k ); } }
+		public VALUE_GENERIC_TYPE get( final KEY_GENERIC_TYPE k ) { synchronized( sync ) { return map.get( k ); } }
+		public boolean containsKey( final Object ok ) { synchronized( sync ) { return map.containsKey( ok ); } }
+#endif
+
+#if #values(primitive)
+		public boolean containsValue( final Object ov ) { synchronized( sync ) { return map.containsValue( ov ); } }
+#endif
+
+#if #keys(reference)
+		public VALUE_GENERIC_TYPE REMOVE_VALUE( final Object k ) { synchronized( sync ) { return map.REMOVE_VALUE( k ); } }
+		public VALUE_GENERIC_TYPE GET_VALUE( final Object k ) { synchronized( sync ) { return map.GET_VALUE( k ); } }
+#endif
+
+		public boolean isEmpty() { synchronized( sync ) { return map.isEmpty(); } }
+		public ObjectSet<Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>> entrySet() { synchronized( sync ) { return map.entrySet(); } }
+
+		public int hashCode() { synchronized( sync ) { return map.hashCode(); } }
+		public boolean equals( final Object o ) { synchronized( sync ) { return map.equals( o  ); } }
+	}
+
+	/** Returns a synchronized type-specific map backed by the given type-specific map.
+	 *
+	 * @param m the map to be wrapped in a synchronized map.
+	 * @return a synchronized view of the specified map.
+	 * @see java.util.Collections#synchronizedMap(Map)
+	 */
+	public static KEY_VALUE_GENERIC MAP KEY_VALUE_GENERIC synchronize( final MAP KEY_VALUE_GENERIC m ) { return new SynchronizedMap KEY_VALUE_GENERIC( m ); }
+
+	/** Returns a synchronized type-specific map backed by the given type-specific map, using an assigned object to synchronize.
+	 *
+	 * @param m the map to be wrapped in a synchronized map.
+	 * @param sync an object that will be used to synchronize the access to the map.
+	 * @return a synchronized view of the specified map.
+	 * @see java.util.Collections#synchronizedMap(Map)
+	 */
+
+	public static KEY_VALUE_GENERIC MAP KEY_VALUE_GENERIC synchronize( final MAP KEY_VALUE_GENERIC m, final Object sync ) { return new SynchronizedMap KEY_VALUE_GENERIC( m, sync ); }
+
+
+
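As an illustrative sketch of the two factory methods above (the Int2IntMaps/Int2IntOpenHashMap specializations are the ones shipped by the library; class and variable names below are hypothetical):

	import it.unimi.dsi.fastutil.ints.Int2IntMap;
	import it.unimi.dsi.fastutil.ints.Int2IntMaps;
	import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap;

	public class SynchronizeSketch {
		public static void main( String[] args ) {
			// Every method of the wrapper synchronizes either on the wrapper itself...
			Int2IntMap shared = Int2IntMaps.synchronize( new Int2IntOpenHashMap() );
			shared.put( 1, 2 );
			// ...or on an explicit lock object supplied by the caller.
			final Object lock = new Object();
			Int2IntMap sharedOnLock = Int2IntMaps.synchronize( new Int2IntOpenHashMap(), lock );
			synchronized( lock ) { // the same lock can guard compound operations
				if ( ! sharedOnLock.containsKey( 3 ) ) sharedOnLock.put( 3, 4 );
			}
		}
	}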
+	/** An unmodifiable wrapper class for maps. */
+
+	public static class UnmodifiableMap KEY_VALUE_GENERIC extends FUNCTIONS.UnmodifiableFunction KEY_VALUE_GENERIC implements MAP KEY_VALUE_GENERIC, java.io.Serializable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected final MAP KEY_VALUE_GENERIC map;
+
+		protected transient volatile ObjectSet<MAP.Entry KEY_VALUE_GENERIC> entries;
+		protected transient volatile SET KEY_GENERIC keys;
+		protected transient volatile VALUE_COLLECTION VALUE_GENERIC values;
+
+		protected UnmodifiableMap( final MAP KEY_VALUE_GENERIC m ) {
+			super( m );
+			this.map = m;
+		}
+
+		public int size() { return map.size(); }
+		public boolean containsKey( final KEY_TYPE k ) { return map.containsKey( k ); }
+		public boolean containsValue( final VALUE_TYPE v ) { return map.containsValue( v ); }
+
+		public VALUE_GENERIC_TYPE defaultReturnValue()  { throw new UnsupportedOperationException(); }
+		public void defaultReturnValue( final VALUE_GENERIC_TYPE defRetValue )  { throw new UnsupportedOperationException(); }
+
+		public VALUE_GENERIC_TYPE put( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); }
+
+		//public void putAll( final MAP KEY_VALUE_EXTENDS_GENERIC c ) { throw new UnsupportedOperationException(); }
+		public void putAll( final Map<? extends KEY_GENERIC_CLASS, ? extends VALUE_GENERIC_CLASS> m ) { throw new UnsupportedOperationException(); }
+
+		public ObjectSet<MAP.Entry KEY_VALUE_GENERIC> ENTRYSET() { if ( entries == null ) entries = ObjectSets.unmodifiable( map.ENTRYSET() ); return entries; }
+		public SET KEY_GENERIC keySet() { if ( keys == null ) keys = SETS.unmodifiable( map.keySet() ); return keys; }
+		public VALUE_COLLECTION VALUE_GENERIC values() { if ( values == null ) values = VALUE_COLLECTIONS.unmodifiable( map.values() ); return values; }
+
+		public void clear() { throw new UnsupportedOperationException(); }
+		public String toString() { return map.toString(); }
+
+#if #keys(primitive) && #values(primitive)
+		public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS k, final VALUE_GENERIC_CLASS v ) { throw new UnsupportedOperationException(); }
+#endif
+
+#if #keys(primitive)
+		public VALUE_GENERIC_TYPE remove( final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+		public VALUE_GENERIC_TYPE get( final KEY_GENERIC_TYPE k ) { return map.get( k ); }
+		public boolean containsKey( final Object ok ) { return map.containsKey( ok ); }
+#endif
+
+#if #values(primitive)
+		public boolean containsValue( final Object ov ) { return map.containsValue( ov ); }
+#endif
+
+#if #keys(reference) || #values(reference)
+		public VALUE_GENERIC_TYPE REMOVE_VALUE( final Object k ) { throw new UnsupportedOperationException(); }
+		public VALUE_GENERIC_TYPE GET_VALUE( final Object k ) { return map.GET_VALUE( k ); }
+#endif
+
+		public boolean isEmpty() { return map.isEmpty(); }
+		public ObjectSet<Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>> entrySet() { return ObjectSets.unmodifiable( map.entrySet() ); }		
+	}
+
+	/** Returns an unmodifiable type-specific map backed by the given type-specific map.
+	 *
+	 * @param m the map to be wrapped in an unmodifiable map.
+	 * @return an unmodifiable view of the specified map.
+	 * @see java.util.Collections#unmodifiableMap(Map)
+	 */
+	public static KEY_VALUE_GENERIC MAP KEY_VALUE_GENERIC unmodifiable( final MAP KEY_VALUE_GENERIC m ) { return new UnmodifiableMap KEY_VALUE_GENERIC( m ); }
+
+}
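Likewise, a minimal sketch of the unmodifiable view returned by the factory above, again assuming the int-to-int specialization (illustrative names only):

	import it.unimi.dsi.fastutil.ints.Int2IntMap;
	import it.unimi.dsi.fastutil.ints.Int2IntMaps;
	import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap;

	public class UnmodifiableSketch {
		public static void main( String[] args ) {
			Int2IntOpenHashMap backing = new Int2IntOpenHashMap();
			backing.put( 1, 10 );
			// Reads are delegated to the backing map...
			Int2IntMap view = Int2IntMaps.unmodifiable( backing );
			System.out.println( view.get( 1 ) ); // prints 10
			// ...while mutators throw UnsupportedOperationException.
			try {
				view.put( 2, 20 );
			}
			catch( UnsupportedOperationException expected ) {
				System.out.println( "view is read-only" );
			}
		}
	}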
diff --git a/drv/OpenCustomDoubleHashMap.drv b/drv/OpenCustomDoubleHashMap.drv
new file mode 120000
index 0000000..2a96034
--- /dev/null
+++ b/drv/OpenCustomDoubleHashMap.drv
@@ -0,0 +1 @@
+OpenDoubleHashMap.drv
\ No newline at end of file
diff --git a/drv/OpenCustomDoubleHashSet.drv b/drv/OpenCustomDoubleHashSet.drv
new file mode 120000
index 0000000..579ba65
--- /dev/null
+++ b/drv/OpenCustomDoubleHashSet.drv
@@ -0,0 +1 @@
+OpenDoubleHashSet.drv
\ No newline at end of file
diff --git a/drv/OpenCustomHashMap.drv b/drv/OpenCustomHashMap.drv
new file mode 120000
index 0000000..2da9b3a
--- /dev/null
+++ b/drv/OpenCustomHashMap.drv
@@ -0,0 +1 @@
+OpenHashMap.drv
\ No newline at end of file
diff --git a/drv/OpenCustomHashSet.drv b/drv/OpenCustomHashSet.drv
new file mode 120000
index 0000000..d00472c
--- /dev/null
+++ b/drv/OpenCustomHashSet.drv
@@ -0,0 +1 @@
+OpenHashSet.drv
\ No newline at end of file
diff --git a/drv/OpenDoubleHashMap.drv b/drv/OpenDoubleHashMap.drv
new file mode 100644
index 0000000..f6da860
--- /dev/null
+++ b/drv/OpenDoubleHashMap.drv
@@ -0,0 +1,2606 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.Arrays;
+import java.util.Map;
+import java.util.NoSuchElementException;
+#ifdef Custom
+//import it.unimi.dsi.fastutil.Maps;
+#endif
+
+import it.unimi.dsi.fastutil.Hash;
+import it.unimi.dsi.fastutil.HashCommon;
+import it.unimi.dsi.fastutil.bytes.ByteArrays;
+
+import VALUE_PACKAGE.VALUE_COLLECTION;
+import VALUE_PACKAGE.VALUE_ABSTRACT_COLLECTION;
+
+#if #values(primitive) || #keys(primitive) && #valueclass(Object)
+import VALUE_PACKAGE.VALUE_ITERATOR;
+#endif
+
+#if #keys(reference) || #values(reference)
+import it.unimi.dsi.fastutil.objects.ObjectArrays;
+#endif
+
+
+#ifdef Linked
+
+import java.util.Comparator;
+
+#if #key(reference)
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.SortedSet;
+#endif
+
+#if #values(primitive)
+import VALUE_PACKAGE.VALUE_LIST_ITERATOR;
+#endif
+
+#if #keys(primitive) && #valueclass(Reference)
+import it.unimi.dsi.fastutil.objects.ObjectIterator;
+#endif
+
+import it.unimi.dsi.fastutil.objects.AbstractObjectSortedSet;
+import it.unimi.dsi.fastutil.objects.ObjectListIterator;
+import it.unimi.dsi.fastutil.objects.ObjectBidirectionalIterator;
+import it.unimi.dsi.fastutil.objects.ObjectSortedSet;
+
+#else
+
+import it.unimi.dsi.fastutil.objects.AbstractObjectSet;
+
+#if #keys(primitive) && ! #valueclass(Object)
+import it.unimi.dsi.fastutil.objects.ObjectIterator;
+#endif
+
+#endif
+
+
+
+#ifdef Linked
+/**  A type-specific linked hash map with a fast, small-footprint implementation.
+ *
+ * <P>Instances of this class use a hash table to represent a map. The table is
+ * enlarged as needed when new entries are created, but it is <em>never</em> made
+ * smaller (even on a {@link #clear()}). A family of {@linkplain #trim() trimming
+ * methods} lets you control the size of the table; this is particularly useful
+ * if you reuse instances of this class.
+ *
+ * <P>The enlargement speed is controlled by the <em>growth factor</em>, a
+ * positive number. If the growth factor is <var>p</var>, then the table is
+ * enlarged each time roughly by a factor 2<sup>p/16</sup>. By default, <var>p</var> is
+ * {@link Hash#DEFAULT_GROWTH_FACTOR}, which means that the table is doubled at
+ * each enlargement, but one can easily set more or less aggressive policies by
+ * calling {@link #growthFactor(int)} (note that the growth factor is <em>not</em> serialized:
+ * deserialized tables get the {@linkplain Hash#DEFAULT_GROWTH_FACTOR default growth factor}).
+ *
+ * <P>Iterators created by this map will enumerate pairs in the same order in which they
+ * have been added to the map (note that addition of pairs whose key is already present
+ * in the map will not change the iteration order). Note that this order has nothing in common with the natural
+ * order of the keys.
+ *
+ * <P>This class implements the interface of a sorted map, so as to allow easy
+ * access to the iteration order: for instance, you can get the first key
+ * in iteration order with {@link #firstKey()} without having to create an
+ * iterator; however, this class partially violates the {@link java.util.SortedMap}
+ * contract because all submap methods throw an exception and {@link
+ * #comparator()} always returns <code>null</code>.
+ *
+ * <P>The iterators provided by the views of this class are type-specific
+ * {@linkplain java.util.ListIterator list iterators}. However, creation of an
+ * iterator using a starting point is going to be very expensive, as the chosen
+ * starting point must be linearly searched for, unless it is {@link #lastKey()},
+ * in which case the iterator is created in constant time.
+ *
+ * <P>Note that deletions in a linked table require scanning the list until the
+ * element to be removed is found. The only exceptions are the first element, the last element,
+ * and deletions performed using an iterator.
+ *
+ * @see Hash
+ * @see HashCommon
+ */
+
+public class OPEN_DOUBLE_HASH_MAP KEY_VALUE_GENERIC extends ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable, Hash {
+
+#else
+
+#ifdef Custom
+
+/** A type-specific hash map with a fast, small-footprint implementation whose {@linkplain it.unimi.dsi.fastutil.Hash.Strategy hashing strategy}
+ * is specified at creation time.
+ *
+ * <P>Instances of this class use a hash table to represent a map. The table is
+ * enlarged as needed when new entries are created, but it is <em>never</em> made
+ * smaller (even on a {@link #clear()}). A family of {@linkplain #trim() trimming
+ * methods} lets you control the size of the table; this is particularly useful
+ * if you reuse instances of this class.
+ *
+ * <P>The enlargement speed is controlled by the <em>growth factor</em>, a
+ * positive number. If the growth factor is <var>p</var>, then the table is
+ * enlarged each time roughly by a factor 2<sup>p/16</sup>. By default, <var>p</var> is
+ * {@link Hash#DEFAULT_GROWTH_FACTOR}, which means that the table is doubled at
+ * each enlargement, but one can easily set more or less aggressive policies by
+ * calling {@link #growthFactor(int)} (note that the growth factor is <em>not</em> serialized:
+ * deserialized tables get the {@linkplain Hash#DEFAULT_GROWTH_FACTOR default growth factor}).
+ *
+ * @see Hash
+ * @see HashCommon
+ */
+
+public class OPEN_DOUBLE_HASH_MAP KEY_VALUE_GENERIC extends ABSTRACT_MAP KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable, Hash {
+
+#else
+
+/** A type-specific hash map with a fast, small-footprint implementation.
+ *
+ * <P>Instances of this class use a hash table to represent a map. The table is
+ * enlarged as needed when new entries are created, but it is <em>never</em> made
+ * smaller (even on a {@link #clear()}). A family of {@linkplain #trim() trimming
+ * methods} lets you control the size of the table; this is particularly useful
+ * if you reuse instances of this class.
+ *
+ * <P>The enlargement speed is controlled by the <em>growth factor</em>, a
+ * positive number. If the growth factor is <var>p</var>, then the table is
+ * enlarged each time roughly by a factor 2<sup>p/16</sup>. By default, <var>p</var> is
+ * {@link Hash#DEFAULT_GROWTH_FACTOR}, which means that the table is doubled at
+ * each enlargement, but one can easily set more or less aggressive policies by
+ * calling {@link #growthFactor(int)} (note that the growth factor is <em>not</em> serialized:
+ * deserialized tables get the {@linkplain Hash#DEFAULT_GROWTH_FACTOR default growth factor}).
+ *
+ * @see Hash
+ * @see HashCommon
+ */
+
+public class OPEN_DOUBLE_HASH_MAP KEY_VALUE_GENERIC extends ABSTRACT_MAP KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable, Hash {
+
+#endif
+
+#endif
+	/** The array of keys. */
+	protected transient KEY_GENERIC_TYPE key[];
+
+	/** The array of values. */
+	protected transient VALUE_GENERIC_TYPE value[];
+	 
+	/** The array of occupancy states. */
+	protected transient byte state[];
+
+	/** The acceptable load factor. */
+	protected final float f;
+	 
+	/** Index into the prime list, giving the current table size. */
+	protected transient int p;
+
+	/** Threshold after which we rehash. It must be the table size times {@link #f}. */
+	protected transient int maxFill;
+
+	/** Number of free entries in the table (may be less than the table size - {@link #count} because of deleted entries). */
+	protected transient int free;
+
+	/** Number of entries in the map. */
+	protected int count;
+
+#ifdef Linked
+	/** Cached set of entries. */
+	protected transient volatile FastSortedEntrySet KEY_VALUE_GENERIC entries;
+
+	/** Cached set of keys. */
+	protected transient volatile SORTED_SET KEY_GENERIC keys;
+#else
+	/** Cached set of entries. */
+	protected transient volatile FastEntrySet KEY_VALUE_GENERIC entries;
+
+	/** Cached set of keys. */
+	protected transient volatile SET KEY_GENERIC keys;
+#endif
+
+	/** Cached collection of values. */
+	protected transient volatile VALUE_COLLECTION VALUE_GENERIC values;
+
+	/** The growth factor of the table. The next table size will be <code>{@link Hash#PRIMES}[{@link #p}+growthFactor]</code>. */
+	protected transient int growthFactor = Hash.DEFAULT_GROWTH_FACTOR;
+
+#ifdef Linked
+	/** The index of the first entry in iteration order. It is valid iff {@link #count} is nonzero; otherwise, it contains -1. */
+	protected transient int first = -1;
+	/** The index of the last entry in iteration order. It is valid iff {@link #count} is nonzero; otherwise, it contains -1. */
+	protected transient int last = -1;
+	/** For each entry, the next and the previous entry in iteration order
+	exclusive-or'd together. It is valid only on {@link Hash#OCCUPIED}
+	entries. The first and the last entry contain the actual successor and
+	predecessor, respectively, exclusive-or'd with -1. */
+	protected transient int link[];
+#endif
+
+#ifdef Custom
+	/** The hash strategy of this custom map. */
+	protected Strategy KEY_GENERIC strategy;
+#endif
+
+	private static final long serialVersionUID = -7046029254386353129L;
+
+	private static final boolean ASSERTS = ASSERTS_VALUE;
+
+#ifdef Custom
+	/** Creates a new hash map.
+	 *
+	 * The actual table size is the least available prime greater than <code>n</code>/<code>f</code>.
+	 *
+	 * @param n the expected number of elements in the hash map.
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 * @see Hash#PRIMES
+	 */
+	 
+	@SuppressWarnings("unchecked")
+	public OPEN_DOUBLE_HASH_MAP( final int n, final float f, final Strategy KEY_GENERIC strategy ) {
+		this.strategy = strategy;
+#else
+	/** Creates a new hash map.
+	 *
+	 * The actual table size is the least available prime greater than <code>n</code>/<code>f</code>.
+	 *
+	 * @param n the expected number of elements in the hash map.
+	 * @param f the load factor.
+	 * @see Hash#PRIMES
+	 */
+	 
+	@SuppressWarnings("unchecked")
+	public OPEN_DOUBLE_HASH_MAP( final int n, final float f ) {
+#endif
+		if ( f <= 0 || f > 1 ) throw new IllegalArgumentException( "Load factor must be greater than 0 and smaller than or equal to 1" );
+		if ( n < 0 ) throw new IllegalArgumentException( "Hash table size must be nonnegative" );
+
+		int l = Arrays.binarySearch( PRIMES, (int)( n / f ) + 1 );
+		if ( l < 0 ) l = -l - 1;
+
+		free = PRIMES[ p = l ];
+		this.f = f;
+		this.maxFill = (int)( free * f );
+		key = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ free ];
+		value = VALUE_GENERIC_ARRAY_CAST new VALUE_TYPE[ free ];
+		state = new byte[ free ];
+#ifdef Linked
+		link = new int[ free ];
+#endif
+	}
+	 
+	 
+#ifdef Custom
+	/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
+	 *
+	 * @param n the expected number of elements in the hash map.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_MAP( final int n, final Strategy KEY_GENERIC strategy ) {
+	this( n, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
+	 *
+	 * @param n the expected number of elements in the hash map.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_MAP( final int n ) {
+		this( n, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+
+#ifdef Custom
+	/** Creates a new hash map with {@link Hash#DEFAULT_INITIAL_SIZE} entries
+	 * and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_MAP( final Strategy KEY_GENERIC strategy ) {
+		this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash map with {@link Hash#DEFAULT_INITIAL_SIZE} entries
+	 * and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_MAP() {
+		this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+
+#ifdef Custom
+	/** Creates a new hash map copying a given one.
+	 *
+	 * @param m a {@link Map} to be copied into the new hash map. 
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_MAP( final Map<? extends KEY_GENERIC_CLASS, ? extends VALUE_GENERIC_CLASS> m, final float f, final Strategy KEY_GENERIC strategy ) {
+		this( m.size(), f, strategy );
+		putAll( m );
+	}
+#else
+	/** Creates a new hash map copying a given one.
+	 *
+	 * @param m a {@link Map} to be copied into the new hash map. 
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_MAP( final Map<? extends KEY_GENERIC_CLASS, ? extends VALUE_GENERIC_CLASS> m, final float f ) {
+		this( m.size(), f );
+		putAll( m );
+	}
+#endif
+
+#ifdef Custom
+	/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given one.
+	 *
+	 * @param m a {@link Map} to be copied into the new hash map. 
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_MAP( final Map<? extends KEY_GENERIC_CLASS, ? extends VALUE_GENERIC_CLASS> m, final Strategy KEY_GENERIC strategy ) {
+		this( m, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given one.
+	 *
+	 * @param m a {@link Map} to be copied into the new hash map. 
+	 */
+	 
+	public OPEN_DOUBLE_HASH_MAP( final Map<? extends KEY_GENERIC_CLASS, ? extends VALUE_GENERIC_CLASS> m ) {
+		this( m, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+#ifdef Custom
+	/** Creates a new hash map copying a given type-specific one.
+	 *
+	 * @param m a type-specific map to be copied into the new hash map. 
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_MAP( final MAP KEY_VALUE_GENERIC m, final float f, final Strategy KEY_GENERIC strategy ) {
+		this( m.size(), f, strategy );
+		putAll( m );
+	}
+
+#else
+	/** Creates a new hash map copying a given type-specific one.
+	 *
+	 * @param m a type-specific map to be copied into the new hash map. 
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_MAP( final MAP KEY_VALUE_GENERIC m, final float f ) {
+		this( m.size(), f );
+		putAll( m );
+	}
+#endif
+
+#ifdef Custom
+	/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given type-specific one.
+	 *
+	 * @param m a type-specific map to be copied into the new hash map. 
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_MAP( final MAP KEY_VALUE_GENERIC m, final Strategy KEY_GENERIC strategy ) {
+		this( m, DEFAULT_LOAD_FACTOR, strategy );
+	}
+
+#else
+	/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given type-specific one.
+	 *
+	 * @param m a type-specific map to be copied into the new hash map. 
+	 */
+	 
+	public OPEN_DOUBLE_HASH_MAP( final MAP KEY_VALUE_GENERIC m ) {
+		this( m, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+
+#ifdef Custom
+	/** Creates a new hash map using the elements of two parallel arrays.
+	 *
+	 * @param k the array of keys of the new hash map.
+	 * @param v the array of corresponding values in the new hash map.
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 * @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE v[], final float f, final Strategy KEY_GENERIC strategy ) {
+		this( k.length, f, strategy );
+		if ( k.length != v.length ) throw new IllegalArgumentException( "The key array and the value array have different lengths (" + k.length + " and " + v.length + ")" );
+		for( int i = 0; i < k.length; i++ ) this.put( k[ i ], v[ i ] );
+	}
+#else
+	/** Creates a new hash map using the elements of two parallel arrays.
+	 *
+	 * @param k the array of keys of the new hash map.
+	 * @param v the array of corresponding values in the new hash map.
+	 * @param f the load factor.
+	 * @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE v[], final float f ) {
+		this( k.length, f );
+		if ( k.length != v.length ) throw new IllegalArgumentException( "The key array and the value array have different lengths (" + k.length + " and " + v.length + ")" );
+		for( int i = 0; i < k.length; i++ ) this.put( k[ i ], v[ i ] );
+	}
+#endif
+
+#ifdef Custom
+	/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using the elements of two parallel arrays.
+	 *
+	 * @param k the array of keys of the new hash map.
+	 * @param v the array of corresponding values in the new hash map.
+	 * @param strategy the strategy.
+	 * @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE v[], final Strategy KEY_GENERIC strategy ) {
+		this( k, v, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using the elements of two parallel arrays.
+	 *
+	 * @param k the array of keys of the new hash map.
+	 * @param v the array of corresponding values in the new hash map.
+	 * @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE v[] ) {
+		this( k, v, DEFAULT_LOAD_FACTOR );
+	}
+#endif
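For orientation, a brief construction sketch, assuming the int-to-int specialization Int2IntOpenHashMap, which exposes analogous constructors (expected size plus load factor, and parallel key/value arrays); class and variable names are illustrative:

	import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap;

	public class ConstructionSketch {
		public static void main( String[] args ) {
			// Size the table for about 1000 entries with a 0.5 load factor,
			// avoiding rehashes while the map is being filled.
			Int2IntOpenHashMap sized = new Int2IntOpenHashMap( 1000, 0.5f );
			sized.put( 42, 7 );

			// Build a map directly from two parallel arrays of keys and values.
			Int2IntOpenHashMap fromArrays = new Int2IntOpenHashMap(
				new int[] { 1, 2, 3 }, new int[] { 10, 20, 30 } );
			System.out.println( fromArrays.get( 2 ) ); // prints 20
		}
	}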
+
+
+
+#ifdef Custom
+	/** Returns the hashing strategy.
+	 *
+	 * @return the hashing strategy of this custom hash map.
+	 */
+
+	public Strategy KEY_GENERIC strategy() {
+		return strategy;
+	}
+#endif
+
+	/** Sets the growth factor. Subsequent enlargements will increase the table
+	 * size roughly by a multiplicative factor of 2<sup>growthFactor/16</sup>.
+	 * 
+	 * @param growthFactor the new growth factor; it must be positive.
+	 */
+
+	public void growthFactor( int growthFactor ) {
+		if ( growthFactor <= 0 ) throw new IllegalArgumentException( "Illegal growth factor " + growthFactor );
+		this.growthFactor = growthFactor;
+	}
+
+	/** Gets the growth factor.
+	 *
+	 * @return the growth factor of this map.
+	 * @see #growthFactor(int)
+	 */
+
+	public int growthFactor() {
+		return growthFactor;
+	}
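A quick worked example of the enlargement rule from the class comment: with the default growth factor 16 the multiplier is 2^(16/16) = 2 (the table doubles at each rehash), with 8 it is 2^(8/16) ≈ 1.41, and with 32 it is 2^(32/16) = 4.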
+
+
+	/*
+	 * The following methods implement some basic building blocks used by
+	 * all accessors. They are (and should be kept) identical to those used in HashSet.drv.
+	 */
+
+	/** Searches for a key, keeping track of a possible insertion point.
+	 *
+	 * @param k the key.
+	 * @return the index of the correct insertion point, if the key is not found; otherwise,
+	 * <var>-i</var>-1, where <var>i</var> is the index of the entry containing the key.
+	 */
+
+	protected final int findInsertionPoint( final KEY_GENERIC_TYPE k ) {
+		final KEY_GENERIC_TYPE key[] = this.key;
+		final byte state[] = this.state;
+		final int n = key.length;
+
+		// First of all, we make the key into a positive integer.
+#if #keyclass(Object)
+		final int h, k2i = ( h = KEY2INTHASH( k ) ) & 0x7FFFFFFF; 
+#else
+		final int k2i = KEY2INTHASH(k) & 0x7FFFFFFF; 
+#endif
+		// The primary hash, a.k.a. starting point.
+		int h1 = k2i % n;
+
+		if ( state[ h1 ] == OCCUPIED && ! KEY_EQUALS( key[ h1 ], k ) ) {
+			// The secondary hash.
+			final int h2 = ( k2i % ( n - 2 ) ) + 1;
+			do {
+				h1 += h2;
+				if ( h1 >= n || h1 < 0 ) h1 -= n;
+			} while( state[ h1 ] == OCCUPIED && ! KEY_EQUALS( key[ h1 ], k ) ); // There's always a FREE entry.
+		}
+
+		if (state[ h1 ] == FREE) return h1;
+		if (state[ h1 ] == OCCUPIED) return -h1-1; // Necessarily, KEY_EQUALS( key[ h1 ], k ).
+
+		/* Tables without deletions will never use code beyond this point. */
+
+		final int i = h1; // Remember first available bucket for later.
+		  
+		/** See the comments in the documentation of the interface Hash. */
+		if ( ASSERTS ) assert state[ h1 ] == REMOVED;
+		if ( ! KEY_EQUALS( key[ h1 ], k ) ) {
+			// The secondary hash.
+			final int h2 = ( k2i % ( n - 2 ) ) + 1;
+			do {
+				h1 += h2;
+				if ( h1 >= n || h1 < 0 ) h1 -= n;
+			}  while( state[ h1 ] != FREE && ! KEY_EQUALS( key[ h1 ], k ) );
+		}
+		  
+		return state[ h1 ] == OCCUPIED ? -h1-1 : i; // In the first case, necessarily, KEY_EQUALS( key[ h1 ], k ).
+	}
+
+
+	/** Searches for a key.
+	 *
+	 * @param k the key.
+	 * @return the index of the entry containing the key, or -1 if the key wasn't found.
+	 */
+
+	protected final int findKey( final KEY_GENERIC_TYPE k ) {
+		final KEY_GENERIC_TYPE key[] = this.key;
+		final byte state[] = this.state;
+		final int n = key.length;
+
+		// First of all, we make the key into a positive integer.
+#if #keyclass(Object)
+		final int h, k2i = ( h = KEY2INTHASH( k ) ) & 0x7FFFFFFF; 
+#else
+		final int k2i = KEY2INTHASH(k) & 0x7FFFFFFF; 
+#endif
+		// The primary hash, a.k.a. starting point.
+		int h1 = k2i % n;
+		  
+		/** See the comments in the documentation of the interface Hash. */
+		if ( state[ h1 ] != FREE && ! KEY_EQUALS( key[ h1 ], k ) ) {
+			// The secondary hash.
+			final int h2 = ( k2i % ( n - 2 ) ) + 1;
+			do {
+				h1 += h2;
+				if ( h1 >= n || h1 < 0 ) h1 -= n;
+			} while( state[ h1 ] != FREE && ! KEY_EQUALS( key[ h1 ], k ) ); // There's always a FREE entry.
+		}
+
+		return state[ h1 ] == OCCUPIED ? h1 : -1;  // In the first case, necessarily, KEY_EQUALS( key[ h1 ], k ).
+	}
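A self-contained sketch of the probing scheme used by the two methods above, with plain int hashes and no deleted-entry handling (names are hypothetical; the real code advances h1 incrementally instead of multiplying):

	final class DoubleHashProbe {
		/* For a prime table size n, the primary hash picks the starting bucket and
		   the secondary hash picks the stride; since the stride lies in [1, n-2] it
		   is coprime with n, so the probe sequence visits every bucket. */
		static int probe( final int hash, final int n, final int attempt ) {
			final int k2i = hash & 0x7FFFFFFF;      // make the hash nonnegative
			final int h1 = k2i % n;                 // primary hash: starting bucket
			final int h2 = ( k2i % ( n - 2 ) ) + 1; // secondary hash: probe stride
			return (int)( ( h1 + (long)attempt * h2 ) % n );
		}
	}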
+
+
+
+	public VALUE_GENERIC_TYPE put(final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v) {
+		final int i = findInsertionPoint( k );
+		  
+		if (i < 0) {
+			final VALUE_GENERIC_TYPE oldValue = value[-i-1];
+			value[-i-1] = v;
+			return oldValue;
+		}
+
+		if ( state[i] == FREE ) free--;
+		state[i] = OCCUPIED;
+		key[i] = k;
+		value[i] = v;
+
+#ifdef Linked
+		if ( count == 0 ) {
+			first = last = i;
+			link[ i ] = 0;
+		}
+		else {
+			link[ last ] ^= i ^ -1;
+			link[ i ] = last ^ -1;
+			last = i;
+		}
+#endif
+
+
+		if ( ++count >= maxFill ) {
+			int newP = Math.min( p + growthFactor, PRIMES.length - 1 );
+			// Just to be sure that size changes when p is very small.
+			while( PRIMES[ newP ] == PRIMES[ p ] ) newP++;
+			rehash( newP ); // Table too filled, let's rehash
+		}
+		if ( free == 0 ) rehash( p );
+		if ( ASSERTS ) checkTable();
+		return defRetValue;
+	}
+
+
+
+#if #values(primitive) || #keys(primitive)
+
+	public VALUE_GENERIC_CLASS put(final KEY_GENERIC_CLASS ok, final VALUE_GENERIC_CLASS ov) {
+		final VALUE_GENERIC_TYPE v = VALUE_CLASS2TYPE(ov);
+		final KEY_GENERIC_TYPE k = KEY_CLASS2TYPE(ok);
+
+		final int i = findInsertionPoint( k );
+		  
+		if (i < 0) {
+			final VALUE_GENERIC_TYPE oldValue = value[-i-1];
+			value[-i-1] = v;
+			return VALUE2OBJ(oldValue);
+		}
+
+		if ( state[i] == FREE ) free--;
+		state[i] = OCCUPIED;
+		key[i] = k;
+		value[i] = v;
+
+#ifdef Linked
+		if ( count == 0 ) {
+			first = last = i;
+			link[ i ] = 0;
+		}
+		else {
+			link[ last ] ^= i ^ -1;
+			link[ i ] = last ^ -1;
+			last = i;
+		}
+#endif
+
+		if ( ++count >= maxFill ) rehash( Math.min(p+16, PRIMES.length-1) ); // Table too filled, let's rehash
+		if ( free == 0 ) rehash( p );
+		if ( ASSERTS ) checkTable();
+		return OBJECT_DEFAULT_RETURN_VALUE;
+	}
+	 
+
+#endif
+
+
+
+
+	public boolean containsValue( final VALUE_TYPE v ) {
+		final VALUE_GENERIC_TYPE value[] = this.value;
+		final byte state[] = this.state;
+
+		int i = 0, j = count;
+
+		while(j-- != 0) {
+			while(state[ i ] != OCCUPIED ) i++;
+			if ( VALUE_EQUALS(value[ i ], v ) ) return true;
+			i++;
+		}
+		return false;
+	}
+
+	/* Removes all elements from this map.
+	 *
+	 * <P>To increase object reuse, this method does not change the table size.
+	 * If you want to reduce the table size, you must use {@link #trim()}.
+	 *
+	 */
+	public void clear() {
+		if ( free == state.length ) return;
+
+		free = state.length;
+		count = 0;
+
+		ByteArrays.fill( state, FREE );
+
+		// We null all object entries so that the garbage collector can do its work.
+#if #keys(reference)
+		ObjectArrays.fill( key, null );
+#endif
+#if #values(reference)
+		ObjectArrays.fill( value, null );
+#endif
+
+#ifdef Linked
+		first = last = -1;
+#endif
+	}
+
+	/** The entry class for a hash map does not record key and value, but
+	 * rather the position in the hash table of the corresponding entry. This
+	 * is necessary so that calls to {@link java.util.Map.Entry#setValue(Object)} are reflected in
+	 * the map. */
+
+	private final class MapEntry implements MAP.Entry KEY_VALUE_GENERIC, Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> {
+		private int index;
+		
+		MapEntry( final int index ) {
+			this.index = index;
+		}
+		
+		public KEY_GENERIC_CLASS getKey() {
+			return KEY2OBJ( key[ index ] );
+		}
+		  
+#if #keys(primitive)
+		public KEY_TYPE ENTRY_GET_KEY() {
+	   		return key[ index ];
+		}
+#endif
+
+		public VALUE_GENERIC_CLASS getValue() {
+			return VALUE2OBJ( value[ index ] );
+		}
+		  
+#if #values(primitive)
+		public VALUE_GENERIC_TYPE ENTRY_GET_VALUE() {
+			return value[ index ];
+		}
+#endif
+
+		public VALUE_GENERIC_TYPE setValue( final VALUE_GENERIC_TYPE v ) {
+			final VALUE_GENERIC_TYPE oldValue = value[ index ];
+			value[ index ] = v;
+			return oldValue;
+		}
+		  
+#if #values(primitive)
+		  
+		public VALUE_GENERIC_CLASS setValue( final VALUE_GENERIC_CLASS v ) {
+			return VALUE2OBJ( setValue( VALUE_CLASS2TYPE( v ) ) );
+		}
+
+#endif
+
+		@SuppressWarnings("unchecked")
+		public boolean equals( final Object o ) {
+			if (!(o instanceof Map.Entry)) return false;
+			Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> e = (Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>)o;
+   				
+			return KEY_EQUALS( key[ index ], KEY_CLASS2TYPE( e.getKey() ) ) && VALUE_EQUALS( value[ index ], VALUE_CLASS2TYPE( e.getValue() ) );
+		}
+		  
+		public int hashCode() {
+			return KEY2JAVAHASH( key[ index ] ) ^ VALUE2JAVAHASH( value[ index ] );
+		}
+		 
+			  
+		public String toString() {
+			return key[ index ] + "=>" + value[ index ];
+		}
+	}
+
+
+#ifdef Linked
+
+	/** Modifies the {@link #link} vector so that the given entry is removed.
+	 *
+	 * <P>If the given entry is the first or the last one, this method will complete
+	 * in constant time; otherwise, it will have to search for the given entry.
+	 *
+	 * @param i the index of an entry. 
+	 */
+	private void fixPointers( int i ) {
+		if ( count == 0 ) {
+			first = last = -1;
+			return;
+		}
+
+		if ( first == i ) {
+			first = link[ i ] ^ -1;
+			link[ first ] ^= i ^ -1;
+			return;
+		}
+
+		if ( last == i ) {
+			last = link[ i ] ^ -1;
+			link[ last ] ^= i ^ -1;
+			return;
+		}
+
+		int j = first, prev = -1, next;
+		while( ( next = link[ j ] ^ prev ) != i ) {
+			prev = j;
+			j = next;
+		}
+		link[ j ] ^= link[ i ] ^ i ^ j;
+		link[ link[ i ] ^ j ] ^= i ^ j;
+	}
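As an aside on the <code>link</code> vector manipulated above: each entry stores its predecessor and successor XORed together, so a traversal only needs to remember the previous index. A minimal, stand-alone sketch using the same storage convention (names are hypothetical):

	final class XorLinkWalk {
		// Walk the iteration order forward: link[ i ] = prev ^ next, where the first
		// and last entries store their single neighbour XORed with -1 ("no entry").
		static void forward( final int[] link, final int first ) {
			int prev = -1, curr = first;
			while ( curr != -1 ) {
				System.out.println( curr );           // visit the entry at index curr
				final int next = link[ curr ] ^ prev; // recover the successor
				prev = curr;
				curr = next;
			}
		}
	}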
+
+
+	/** Returns the first key of this map in iteration order.
+	 *
+	 * @return the first key in iteration order.
+	 */
+	public KEY_GENERIC_TYPE FIRST_KEY() {
+		if ( count == 0 ) throw new NoSuchElementException();
+		return key[ first ];
+	}
+
+
+	/** Returns the last key of this map in iteration order.
+	 *
+	 * @return the last key in iteration order.
+	 */
+	public KEY_GENERIC_TYPE LAST_KEY() {
+		if ( count == 0 ) throw new NoSuchElementException();
+		return key[ last ];
+	}
+
+	public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return null; }
+
+	public SORTED_MAP KEY_VALUE_GENERIC tailMap( KEY_GENERIC_TYPE from ) { throw new UnsupportedOperationException(); }
+	public SORTED_MAP KEY_VALUE_GENERIC headMap( KEY_GENERIC_TYPE to ) { throw new UnsupportedOperationException(); }
+	public SORTED_MAP KEY_VALUE_GENERIC subMap( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) { throw new UnsupportedOperationException(); }
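A short usage sketch of the insertion-order accessors exposed here, written against the library's linked specialization Int2IntLinkedOpenHashMap, which offers analogous first/last-key accessors (illustrative names and values):

	import it.unimi.dsi.fastutil.ints.Int2IntLinkedOpenHashMap;

	public class IterationOrderSketch {
		public static void main( String[] args ) {
			Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap();
			m.put( 7, 70 );
			m.put( 3, 30 );
			m.put( 5, 50 );
			// Keys come back in insertion order, not in their natural order.
			System.out.println( m.firstIntKey() ); // 7 (oldest key)
			System.out.println( m.lastIntKey() );  // 5 (newest key)
		}
	}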
+
+
+
+	/** A list iterator over a linked map.
+	 *
+	 * <P>This class provides a list iterator over a linked hash map. The empty constructor runs in 
+	 * constant time. The one-argument constructor needs to search for the given key, but it is
+	 * optimized for the case of {@link java.util.SortedMap#lastKey()}, in which case it runs in constant time, too.
+	 */
+
+	private class MapIterator {
+		/** The entry that will be returned by the next call to {@link java.util.ListIterator#previous()} (or -1 if no previous entry exists). */
+		int prev = -1;
+		/** The entry that will be returned by the next call to {@link java.util.ListIterator#next()} (or -1 if no next entry exists). */
+		int next = -1;
+		/** The last entry that was returned (or -1 if we did not iterate or used {@link java.util.Iterator#remove()}). */
+		int curr = -1;
+		/** The current index (in the sense of a {@link java.util.ListIterator}). Note that this value is not meaningful when this iterator has been created using the nonempty constructor.*/
+		int index = 0;
+
+		MapIterator() {
+			next = first;
+		}
+
+		MapIterator( final KEY_GENERIC_TYPE from ) {
+			if ( KEY_EQUALS( key[ last ], from ) ) {
+				prev = last;
+				index = count;
+			}
+			else {
+				if ( ! OPEN_DOUBLE_HASH_MAP.this.containsKey( from ) ) throw new IllegalArgumentException( "The key " + from + " does not belong to this map." );
+				next = first;
+				int e;
+				do e = nextEntry(); while( ! KEY_EQUALS( key[ e ], from ) );
+				curr = -1;
+			}
+		}
+					 
+		public boolean hasNext() { return next != -1; }
+		public boolean hasPrevious() { return prev != -1; }
+
+		public int nextIndex() {
+			return index;
+		}
+
+		public int previousIndex() {
+			return index - 1;
+		}
+					 
+		public int nextEntry() {
+			if ( ! hasNext() ) return size();
+
+			curr = next;
+			next = link[ curr ] ^ prev;
+			prev = curr;
+
+			index++;
+
+			return curr;
+		}
+
+		public int previousEntry() {
+			if ( ! hasPrevious() ) return -1;
+
+			curr = prev;
+			prev = link[ curr ] ^ next;
+			next = curr;
+
+			index--;
+
+			return curr;
+		}
+		
+		@SuppressWarnings("unchecked")
+		public void remove() {
+			if ( curr == -1 ) throw new IllegalStateException();
+			state[ curr ] = REMOVED;
+
+#if #keys(reference)
+			key[ curr ] = KEY_GENERIC_CAST HashCommon.REMOVED;
+#endif
+
+#if #values(reference)
+			value[ curr ] = null;
+#endif
+
+			if ( curr == prev ) {
+				/* If the last operation was a next(), we are removing an entry that precedes
+				   the current index, and thus we must decrement it. */
+				index--;
+				prev = link[ curr ] ^ next;
+			}
+			else next = link[ curr ] ^ prev; // curr == next
+
+			count--;
+			/* Now we manually fix the pointers. Because of our knowledge of next
+			   and prev, this is going to be faster than calling fixPointers(). */
+			if ( prev == -1 ) first = next;
+			else link[ prev ] ^= curr ^ next;
+			if ( next == -1 ) last = prev;
+			else link[ next ] ^= curr ^ prev;
+			curr = -1;
+		}
+
+		public int skip( final int n ) { 
+			int i = n;
+			while( i-- != 0 && hasNext() ) nextEntry(); 
+			return n - i - 1;
+		}
+
+		public int back( final int n ) { 
+			int i = n;
+			while( i-- != 0 && hasPrevious() ) previousEntry(); 
+			return n - i - 1;
+		}
+	}
+
+	private class EntryIterator extends MapIterator implements ObjectListIterator<MAP.Entry KEY_VALUE_GENERIC> {
+		public EntryIterator() {}
+
+		public EntryIterator( KEY_GENERIC_TYPE from ) {
+			super( from );
+		}
+
+		public MapEntry next() {
+			return new MapEntry( nextEntry() );
+		}
+
+		public MapEntry previous() {
+			return new MapEntry( previousEntry() );
+		}
+
+		public void set( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); }
+		public void add( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); }
+	}
+
+	private class FastEntryIterator extends MapIterator implements ObjectListIterator<MAP.Entry KEY_VALUE_GENERIC> {
+		final BasicEntry KEY_VALUE_GENERIC entry = new BasicEntry KEY_VALUE_GENERIC ( KEY_NULL, VALUE_NULL );
+	
+		public FastEntryIterator() {}
+
+		public FastEntryIterator( KEY_GENERIC_TYPE from ) {
+			super( from );
+		}
+
+		public BasicEntry KEY_VALUE_GENERIC next() {
+			final int e = nextEntry();
+			entry.key = key[ e ];
+			entry.value = value[ e ];
+			return entry;
+		}
+
+		public BasicEntry KEY_VALUE_GENERIC previous() {
+			final int e = previousEntry();
+			entry.key = key[ e ];
+			entry.value = value[ e ];
+			return entry;
+		}
+
+		public void set( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); }
+		public void add( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); }
+	}
+
+#else	 
+
+	/** An iterator over a hash map. */
+
+	private class MapIterator {
+		/** The index of the next entry to be returned. */
+		int pos = 0;
+		/** The index of the last entry that has been returned. */
+		int last = -1;
+		/** A downward counter measuring how many entries have been returned. */
+		int c = count;
+		
+		{ 
+			final byte state[] = OPEN_DOUBLE_HASH_MAP.this.state;
+			final int n = state.length;
+			
+			if ( c != 0 ) while( pos < n && state[ pos ] != OCCUPIED ) pos++;
+		}
+		
+		public boolean hasNext() {
+			return c != 0 && pos < OPEN_DOUBLE_HASH_MAP.this.state.length;
+		}
+		
+		public int nextEntry() {
+			final byte state[] = OPEN_DOUBLE_HASH_MAP.this.state;
+			final int n = state.length;
+			
+			if ( ! hasNext() ) throw new NoSuchElementException();
+			last = pos;
+			if ( --c != 0 ) do pos++; while( pos < n && state[ pos ] != OCCUPIED );
+			
+			return last;
+		}
+
+		@SuppressWarnings("unchecked")
+		public void remove() {
+			if (last == -1) throw new IllegalStateException();
+			state[last] = REMOVED;
+#if #keys(reference)
+			key[last] = KEY_GENERIC_CAST HashCommon.REMOVED;
+#endif
+#if #values(reference)
+			value[last] = null;
+#endif
+			
+			count--;
+		}
+
+		public int skip( final int n ) { 
+			int i = n;
+			while( i-- != 0 && hasNext() ) nextEntry(); 
+			return n - i - 1;
+		}
+	}
+
+
+	private class EntryIterator extends MapIterator implements ObjectIterator<MAP.Entry KEY_VALUE_GENERIC> {
+		public MAP.Entry KEY_VALUE_GENERIC next() {
+			return new MapEntry( nextEntry() );
+		}
+	}
+
+	private class FastEntryIterator extends MapIterator implements ObjectIterator<MAP.Entry KEY_VALUE_GENERIC> {
+		final BasicEntry KEY_VALUE_GENERIC entry = new BasicEntry KEY_VALUE_GENERIC ( KEY_NULL, VALUE_NULL );
+		public BasicEntry KEY_VALUE_GENERIC next() {
+			final int e = nextEntry();
+			entry.key = key[ e ];
+			entry.value = value[ e ];
+			return entry;
+		}
+	}
+
+#endif
+
+
+	@SuppressWarnings("unchecked")
+	public boolean containsKey( KEY_TYPE k ) {
+		return findKey( KEY_GENERIC_CAST k ) >= 0;
+	}
+	 
+	public int size() {
+		return count;
+	}
+
+	public boolean isEmpty() {
+		return count == 0;
+	}
+
+	@SuppressWarnings("unchecked")
+	public VALUE_GENERIC_TYPE GET_VALUE(final KEY_TYPE k) {
+		final int i = findKey( KEY_GENERIC_CAST k);
+
+		return i < 0 ? defRetValue : value[i];
+	}
+
+	@SuppressWarnings("unchecked")
+	public VALUE_GENERIC_TYPE REMOVE_VALUE(final KEY_TYPE k) {
+		final int i = findKey( KEY_GENERIC_CAST k );
+		if (i < 0) return defRetValue;
+		
+		state[i] = REMOVED;
+		count--;
+
+#if #keys(reference)
+		key[i] = KEY_GENERIC_CAST HashCommon.REMOVED;
+#endif
+#if #values(reference)
+		final VALUE_GENERIC_TYPE v = value[i];
+		value[i] = null;
+#endif
+
+#ifdef Linked
+		fixPointers( i );
+#endif
+#if #values(reference)
+		return v;
+#else
+		return value[i];
+#endif
+	}
+
+
+#if #keys(primitive)
+
+	public VALUE_GENERIC_CLASS get(final KEY_CLASS ok) {
+		final int i = findKey(KEY_CLASS2TYPE(ok));
+
+		return i < 0 ? OBJECT_DEFAULT_RETURN_VALUE : (VALUE_GENERIC_CLASS)VALUE2OBJ(value[i]);
+	}
+#endif
+
+#if #keys(primitive) || #values(primitive)
+	@SuppressWarnings("unchecked")
+	public VALUE_GENERIC_CLASS remove( final Object ok ) {
+		final int i = findKey( KEY_GENERIC_CAST KEY_OBJ2TYPE( ok ) );
+		if (i < 0) return OBJECT_DEFAULT_RETURN_VALUE;
+
+		state[i] = REMOVED;
+		count--;
+
+#if #keys(reference)
+		key[i] = KEY_GENERIC_CAST HashCommon.REMOVED;
+#endif
+
+#if #values(reference)
+		final VALUE_GENERIC_CLASS v = value[i];
+		value[i] = null;
+#endif
+
+#ifdef Linked
+		fixPointers( i );
+#endif
+
+		if ( ASSERTS ) checkTable();
+
+#if #values(reference)
+		return v;
+#else
+		return VALUE2OBJ( value[i] );
+#endif
+	}
+
+
+#endif
+
+
+
+
+#ifdef Linked
+	private final class MapEntrySet extends AbstractObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> implements FastSortedEntrySet KEY_VALUE_GENERIC {
+
+		public ObjectBidirectionalIterator<MAP.Entry KEY_VALUE_GENERIC> iterator() {
+			return new EntryIterator();
+		}
+
+		public Comparator<? super MAP.Entry KEY_VALUE_GENERIC> comparator() { return null; }
+		public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> subSet( MAP.Entry KEY_VALUE_GENERIC fromElement, MAP.Entry KEY_VALUE_GENERIC toElement) { throw new UnsupportedOperationException(); }
+		public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> headSet( MAP.Entry KEY_VALUE_GENERIC toElement ) { throw new UnsupportedOperationException(); }
+		public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> tailSet( MAP.Entry KEY_VALUE_GENERIC fromElement ) { throw new UnsupportedOperationException(); }
+
+		public MAP.Entry KEY_VALUE_GENERIC first() { 
+			if ( count == 0 ) throw new NoSuchElementException();
+			return new MapEntry( OPEN_DOUBLE_HASH_MAP.this.first ); 
+		}
+
+		public MAP.Entry KEY_VALUE_GENERIC last() { 
+			if ( count == 0 ) throw new NoSuchElementException();
+			return new MapEntry( OPEN_DOUBLE_HASH_MAP.this.last ); 
+		}
+		
+#else
+	private final class MapEntrySet extends AbstractObjectSet<MAP.Entry KEY_VALUE_GENERIC> implements FastEntrySet KEY_VALUE_GENERIC {
+
+		public ObjectIterator<MAP.Entry KEY_VALUE_GENERIC> iterator() {
+			return new EntryIterator();
+		}
+
+		public ObjectIterator<MAP.Entry KEY_VALUE_GENERIC> fastIterator() {
+			return new FastEntryIterator();
+		}
+#endif					 
+					 
+		@SuppressWarnings("unchecked")
+		public boolean contains( final Object o ) {
+			if (!(o instanceof Map.Entry)) return false;
+			final Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> e = (Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>)o;
+			final int i = findKey( KEY_CLASS2TYPE( e.getKey() ) );
+			return i >= 0 && VALUE_EQUALS( value[ i ], VALUE_CLASS2TYPE( e.getValue() ) );
+		}
+			 
+		@SuppressWarnings("unchecked")
+		public boolean remove( final Object o ) {
+			if (!(o instanceof Map.Entry)) return false;
+			final Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> e = (Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>)o;
+			final int i = findKey( KEY_CLASS2TYPE( e.getKey() ) );
+			if ( i >= 0 ) OPEN_DOUBLE_HASH_MAP.this.remove( e.getKey() );
+			return i >= 0;
+		}
+			 
+		public int size() {
+			return count;
+		}
+			 
+		public void clear() {
+			OPEN_DOUBLE_HASH_MAP.this.clear();
+		}
+
+#ifdef Linked
+		public ObjectBidirectionalIterator<MAP.Entry KEY_VALUE_GENERIC> iterator( final MAP.Entry KEY_VALUE_GENERIC from ) {
+			return new EntryIterator( KEY_CLASS2TYPE( from.getKey() ) );
+		}
+
+		public ObjectBidirectionalIterator<MAP.Entry KEY_VALUE_GENERIC> fastIterator() {
+			return new FastEntryIterator();
+		}
+				
+		public ObjectBidirectionalIterator<MAP.Entry KEY_VALUE_GENERIC> fastIterator( final MAP.Entry KEY_VALUE_GENERIC from ) {
+			return new FastEntryIterator( KEY_CLASS2TYPE( from.getKey() ) );
+		}
+				
+#endif
+	}
+
+
+#ifdef Linked
+	public FastSortedEntrySet KEY_VALUE_GENERIC ENTRYSET() {
+		if ( entries == null ) entries = new MapEntrySet();
+#else
+	public FastEntrySet KEY_VALUE_GENERIC ENTRYSET() {
+		if ( entries == null ) entries = new MapEntrySet();
+#endif
+		return entries;
+	}
+
+
+	/** An iterator on keys.
+	 *
+	 * <P>We simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods
+	 * (and possibly their type-specific counterparts) so that they return keys
+	 * instead of entries.
+	 */
+
+#ifdef Linked
+	private final class KeyIterator extends MapIterator implements KEY_LIST_ITERATOR KEY_GENERIC {
+		public KeyIterator( final KEY_GENERIC_TYPE k ) { super( k ); }
+		public KEY_GENERIC_TYPE PREV_KEY() { return key[ previousEntry() ]; }
+		public void set( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+		public void add( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+#if ! #keys(reference)
+		public KEY_GENERIC_CLASS previous() { return KEY2OBJ( key[ previousEntry() ] ); }
+		public void set( KEY_CLASS ok ) { throw new UnsupportedOperationException(); }
+		public void add( KEY_CLASS ok ) { throw new UnsupportedOperationException(); }
+#endif
+
+#else
+	private final class KeyIterator extends MapIterator implements KEY_ITERATOR KEY_GENERIC {
+#endif
+
+		public KeyIterator() { super(); }
+		public KEY_GENERIC_TYPE NEXT_KEY() { return key[ nextEntry() ]; }
+#if ! #keys(reference)
+		public KEY_GENERIC_CLASS next() { return KEY2OBJ( key[ nextEntry() ] ); }
+#endif
+	}
+
+
+
+#ifdef Linked
+	private final class KeySet extends ABSTRACT_SORTED_SET KEY_GENERIC {
+
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) {
+			return new KeyIterator( from );
+		}
+
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator() {
+			return new KeyIterator();
+		}
+#else
+	private final class KeySet extends ABSTRACT_SET KEY_GENERIC {
+
+		public KEY_ITERATOR KEY_GENERIC iterator() {
+			return new KeyIterator();
+		}
+#endif
+
+		public int size() {
+			return count;
+		}
+
+		public boolean contains( KEY_TYPE k ) {
+			return containsKey( k );
+		}
+					 
+		public boolean remove( KEY_TYPE k ) {
+			int oldCount = count;
+			OPEN_DOUBLE_HASH_MAP.this.remove( k );
+			return count != oldCount;
+		}
+					 
+		public void clear() {
+			OPEN_DOUBLE_HASH_MAP.this.clear();
+		}
+
+
+#ifdef Linked
+		public KEY_GENERIC_TYPE FIRST() {
+			if ( count == 0 ) throw new NoSuchElementException();
+			return key[ first ];
+		}
+
+		public KEY_GENERIC_TYPE LAST() {
+			if ( count == 0 ) throw new NoSuchElementException();
+			return key[ last ];
+		}
+
+		public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return null; }
+
+		final public SORTED_SET KEY_GENERIC tailSet( KEY_GENERIC_TYPE from ) { throw new UnsupportedOperationException(); }
+		final public SORTED_SET KEY_GENERIC headSet( KEY_GENERIC_TYPE to ) { throw new UnsupportedOperationException(); }
+		final public SORTED_SET KEY_GENERIC subSet( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) { throw new UnsupportedOperationException(); }
+#endif
+	}
+
+
+#ifdef Linked
+	public SORTED_SET KEY_GENERIC keySet() {
+#else
+	public SET KEY_GENERIC keySet() {
+#endif
+		if ( keys == null ) keys = new KeySet();
+		return keys;
+	}
+
+
+	/** An iterator on values.
+	 *
+	 * <P>We simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods
+	 * (and possibly their type-specific counterparts) so that they return values
+	 * instead of entries.
+	 */
+
+#ifdef Linked
+	private final class ValueIterator extends MapIterator implements VALUE_LIST_ITERATOR VALUE_GENERIC {
+		public VALUE_GENERIC_TYPE PREV_VALUE() { return value[ previousEntry() ]; }
+
+#if ! #values(reference)
+		public VALUE_GENERIC_CLASS previous() { return VALUE2OBJ( value[ previousEntry() ] ); }
+		public void set( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); }
+		public void add( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); }
+#endif									
+		public void set( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); }
+		public void add( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); }
+#else
+	private final class ValueIterator extends MapIterator implements VALUE_ITERATOR VALUE_GENERIC {
+#endif
+
+		public ValueIterator() { super(); }
+		public VALUE_GENERIC_TYPE NEXT_VALUE() { return value[ nextEntry() ]; }
+#if ! #values(reference)
+		public VALUE_GENERIC_CLASS next() { return VALUE2OBJ( value[ nextEntry() ] ); }
+#endif
+	}
+
+	public VALUE_COLLECTION VALUE_GENERIC values() {
+		if ( values == null ) values = new VALUE_ABSTRACT_COLLECTION VALUE_GENERIC() {
+
+				public VALUE_ITERATOR VALUE_GENERIC iterator() {
+					return new ValueIterator();
+				}
+
+				public int size() {
+					return count;
+				}
+
+				public boolean contains( VALUE_TYPE v ) {
+					return containsValue( v );
+				}
+
+				public void clear() {
+					OPEN_DOUBLE_HASH_MAP.this.clear();
+				}
+			};
+
+		return values;
+	}
+
+
+	/** Rehashes this map without changing the table size.
+	 * <P>This method should be called when the map underwent numerous deletions and insertions.
+	 * In this case, free entries become rare, and unsuccessful searches
+	 * require probing <em>all</em> entries. For reasonable load factors this method is linear in the number of entries.
+	 * You will need as much additional free memory as
+	 * that occupied by the table.
+	 *
+	 * <P>If you need to reduce the table size to fit this map
+	 * exactly, you must use {@link #trim()}.
+	 *
+	 * @return <code>true</code> if there was enough memory to rehash the map, <code>false</code> otherwise.
+	 * @see #trim()
+	 */
+
+	public boolean rehash() {
+		try {
+			rehash(p);
+		}
+		catch(OutOfMemoryError cantDoIt) { return false; }
+		return true;
+	}
+
+
+	/** Rehashes the map, making the table as small as possible.
+	 * 
+	 * <P>This method rehashes to the smallest size satisfying
+	 * the load factor. It can be used when the map will not be
+	 * changed anymore, so as to optimize access speed (by collecting
+	 * deleted entries) and size.
+	 *
+	 * <P>If the table size is already the minimum possible, this method
+	 * does nothing. If you want to guarantee rehashing, use {@link #rehash()}.
+	 *
+	 * @return true if there was enough memory to trim the map.
+	 * @see #trim(int)
+	 * @see #rehash()
+	 */
+
+	public boolean trim() {
+		int l = Arrays.binarySearch( PRIMES, (int)( count / f ) + 1 );
+		if ( l < 0 ) l = -l - 1;
+		if ( l >= p ) return true;
+		try {
+			rehash( l );
+		}
+		catch(OutOfMemoryError cantDoIt) { return false; }
+		return true;
+	}
+
+
+	/** Rehashes this map if the table is too large.
+	 * 
+	 * <P>Let <var>N</var> be the smallest table size that can hold
+	 * <code>max(n,{@link #size()})</code> entries, still satisfying the load factor. If the current
+	 * table size is smaller than or equal to <var>N</var>, this method does
+	 * nothing. Otherwise, it rehashes this map in a table of size
+	 * <var>N</var>.
+	 *
+	 * <P>This method is useful when reusing maps.  {@linkplain #clear() Clearing a
+	 * map} leaves the table size untouched. If you are reusing a map
+	 * many times, you can call this method with a typical
+	 * size to avoid keeping around a very large table just
+	 * because of a few large transient maps.
+	 *
+	 * @param n the threshold for the trimming.
+	 * @return true if there was enough memory to trim the map.
+	 * @see #trim()
+	 * @see #rehash()
+	 */
+
+	public boolean trim( final int n ) {
+		int l = Arrays.binarySearch( PRIMES, (int)( Math.min( Integer.MAX_VALUE - 1, Math.max( n, count ) / f ) ) + 1 );
+		if ( l < 0 ) l = -l - 1;
+		if ( p <= l ) return true;
+		try {
+			rehash( l );
+		}
+		catch( OutOfMemoryError cantDoIt ) { return false; }
+		return true;
+	}
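A brief sketch of the reuse pattern described in the comment above, assuming the Int2IntOpenHashMap specialization, which exposes the same clear()/trim() pair (values are illustrative):

	import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap;

	public class TrimSketch {
		public static void main( String[] args ) {
			Int2IntOpenHashMap m = new Int2IntOpenHashMap();
			for( int i = 0; i < 1000000; i++ ) m.put( i, i );
			// clear() empties the map but deliberately keeps the large table...
			m.clear();
			// ...so trim it back down before reusing the instance for small workloads.
			m.trim( 16 );
		}
	}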
+
+	/** Resizes the map.
+	 *
+	 * <P>This method implements the basic rehashing strategy, and may be
+	 * overriden by subclasses implementing different rehashing strategies (e.g.,
+	 * disk-based rehashing). However, you should not override this method
+	 * unless you understand the internal workings of this class.
+	 *
+	 * @param newP the new size as an index in {@link Hash#PRIMES}.
+	 */
+
+	@SuppressWarnings("unchecked")
+	protected void rehash( final int newP ) {
+#ifdef Linked
+		int i = first, j = count, prev = -1, newPrev = -1, t, k2i, h1, h2;
+#else
+		int i = 0, j = count, k2i, h1, h2;
+		final byte state[] = this.state;
+#endif
+
+		KEY_GENERIC_TYPE k;
+		VALUE_GENERIC_TYPE v;
+
+		final int newN = PRIMES[newP];
+		final KEY_GENERIC_TYPE key[] = this.key, newKey[] = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[newN];
+		final VALUE_GENERIC_TYPE value[] = this.value, newValue[] = VALUE_GENERIC_ARRAY_CAST new VALUE_TYPE[newN];
+		final byte newState[] = new byte[newN];
+#ifdef Linked
+		final int link[] = this.link, newLink[] = new int[ newN ];
+		first = -1;
+#endif
+		while(j-- != 0) {
+
+#ifndef Linked
+			while(state[i] != OCCUPIED ) i++;
+#endif
+
+			k = key[i];
+			v = value[i];
+			k2i = KEY2INTHASH(k) & 0x7FFFFFFF;
+
+			h1 = k2i % newN;
+			h2 = (k2i % (newN - 2)) + 1;
+
+			if ( newState[h1] != FREE ) {
+				h2 = (k2i % (newN - 2)) + 1;
+				do {
+					h1 += h2;
+					if ( h1 >= newN || h1 < 0 ) h1 -= newN;
+				} while( newState[h1] != FREE );
+			}
+
+			newState[h1] = OCCUPIED;
+			newKey[h1] = k;
+			newValue[h1] = v;
+
+#ifdef Linked
+			t = i;
+			i = link[ i ] ^ prev;
+			prev = t;
+
+			if ( first != -1 ) {
+				newLink[ newPrev ] ^= h1;
+				newLink[ h1 ] = newPrev;
+				newPrev = h1;
+			}
+			else {
+				newPrev = first = h1;
+				newLink[ h1 ] = -1;
+			}
+#else
+			i++;
+#endif
+		}
+
+		p = newP;
+		free = newN - count;
+		maxFill = (int)( newN * f );
+		this.key = newKey;
+		this.value = newValue;
+		this.state = newState;
+#ifdef Linked
+		this.link = newLink;
+		this.last = newPrev;
+		if ( newPrev != -1 ) newLink[ newPrev ] ^= -1; 
+#endif
+	}
+	 
+
+	/** Returns a deep copy of this map. 
+	 *
+	 * <P>This method performs a deep copy of this hash map; the data stored in the
+	 * map, however, is not cloned. Note that this makes a difference only for object keys and values.
+	 *
+	 *  @return a deep copy of this map.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public OPEN_DOUBLE_HASH_MAP KEY_VALUE_GENERIC clone() {
+		OPEN_DOUBLE_HASH_MAP KEY_VALUE_GENERIC c;
+		try {
+			c = (OPEN_DOUBLE_HASH_MAP KEY_VALUE_GENERIC)super.clone();
+		}
+		catch(CloneNotSupportedException cantHappen) {
+			throw new InternalError();
+		}
+
+		c.keys = null;
+		c.values = null;
+		c.entries = null;
+
+		c.key = key.clone();
+		c.value = value.clone();
+		c.state = state.clone();
+#ifdef Linked
+		c.link = link.clone();
+#endif
+#ifdef Custom
+		c.strategy = strategy;
+#endif
+		return c;
+	}
+
+
+	/** Returns a hash code for this map.
+	 *
+	 * This method overrides the generic method provided by the superclass. 
+	 * Since <code>equals()</code> is not overridden, it is important
+	 * that the value returned by this method is the same value as
+	 * the one returned by the overridden method.
+	 *
+	 * @return a hash code for this map.
+	 */
+
+	public int hashCode() {
+		int h = 0, t, i = 0, j = count;
+		while( j-- != 0 ) {
+			while( state[ i ] != OCCUPIED ) i++;
+			t = 0;
+#if #keys(reference)
+			if ( this != key[ i ] )
+#endif
+				t = KEY2JAVAHASH( key[ i ] );
+#if #values(reference)
+			if ( this != value[ i ] )
+#endif
+				t ^=  VALUE2JAVAHASH( value[ i ] );
+			h += t;
+			i++;
+		}
+		return h;
+	}
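+
+	/* The per-entry contributions above are combined by addition, so the result does not
+	 * depend on the order in which the table enumerates its entries and therefore matches
+	 * the entry-by-entry sum computed by the superclass. For a two-entry map, for instance,
+	 * the value is
+	 *
+	 *   ( KEY2JAVAHASH( k1 ) ^ VALUE2JAVAHASH( v1 ) ) + ( KEY2JAVAHASH( k2 ) ^ VALUE2JAVAHASH( v2 ) )
+	 *
+	 * whichever entry happens to be stored first.
+	 */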
+
+
+
+	private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
+		final KEY_GENERIC_TYPE key[] = this.key;
+		final VALUE_GENERIC_TYPE value[] = this.value;
+		final MapIterator i = new MapIterator();
+		int e, j = count;
+
+		s.defaultWriteObject();
+
+		while( j-- != 0 ) {
+			e = i.nextEntry();
+			s.WRITE_KEY( key[ e ] );
+			s.WRITE_VALUE( value[ e ] );
+		}
+	}
+
+
+
+	@SuppressWarnings("unchecked")
+	private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
+		s.defaultReadObject();
+		// We restore the default growth factor.
+		growthFactor = Hash.DEFAULT_GROWTH_FACTOR;
+		// Note that we DO NOT USE the stored p. See CHANGES.
+		p = Arrays.binarySearch( PRIMES, (int)( count / f ) + 1 );
+		if ( p < 0 ) p = -p - 1;
+
+		final int n = PRIMES[ p ];
+		maxFill = (int)( n * f );
+		free = n - count;
+		
+		final KEY_GENERIC_TYPE key[] = this.key = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ n ];
+		final VALUE_GENERIC_TYPE value[] = this.value = VALUE_GENERIC_ARRAY_CAST new VALUE_TYPE[ n ];
+		final byte state[] = this.state = new byte[ n ];
+#ifdef Linked
+		final int link[] = this.link = new int[ n ];
+		int prev = -1;
+		first = last = -1;
+#endif
+
+		int i, k2i, h1, h2;
+		KEY_GENERIC_TYPE k;
+		VALUE_GENERIC_TYPE v;
+
+		i = count;
+		while( i-- != 0 ) {
+
+			k = KEY_GENERIC_CAST s.READ_KEY();
+			v = VALUE_GENERIC_CAST s.READ_VALUE();
+			k2i = KEY2INTHASH( k ) & 0x7FFFFFFF;
+
+			h1 = k2i % n;
+
+			if ( state[ h1 ] != FREE ) {
+				h2 = ( k2i % ( n - 2 ) ) + 1;
+				do {
+					h1 += h2;
+					if ( h1 >= n || h1 < 0 ) h1 -= n;
+				} while( state[ h1 ] != FREE );
+			}
+
+			state[ h1 ] = OCCUPIED;
+			key[ h1 ] = k;
+			value[ h1 ] = v;
+
+#ifdef Linked
+			if ( first != -1 ) {
+				link[ prev ] ^= h1;
+				link[ h1 ] = prev;
+				prev = h1;
+			}
+			else {
+				prev = first = h1;
+				link[ h1 ] = -1;
+			}
+#endif
+		}
+
+#ifdef Linked
+		last = prev;
+		if ( prev != -1 ) link[ prev ] ^= -1; 
+#endif
+
+		if ( ASSERTS ) checkTable();
+	}
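+
+	/* The size computation above relies on the contract of java.util.Arrays.binarySearch():
+	 * when the searched value is not in the array it returns -(insertion point) - 1, so after
+	 * the sign flip p indexes the smallest entry of PRIMES that is at least (int)( count / f ) + 1.
+	 * For example, with count = 100 and f = 0.75f the searched value is 134, and the new table
+	 * size is the first prime in PRIMES not smaller than 134.
+	 */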
+
+
+#ifdef ASSERTS_CODE
+	private void checkTable() {
+		int n = state.length;
+		while( n-- != 0 ) 
+			if ( state[ n ] == OCCUPIED && ! containsKey( key[ n ] ) ) 
+				throw new AssertionError( "Hash table has key " + key[ n ] + " marked as occupied, but the key does not belong to the table" );
+
+#ifdef Linked
+		KEY_BIDI_ITERATOR KEY_GENERIC i = keySet().iterator();
+		KEY_TYPE k;
+		n = size();
+		while( n-- != 0 ) 
+			if ( ! containsKey( k = i.NEXT_KEY() ) ) 
+				throw new AssertionError( "Linked hash table forward enumerates key " + k + ", but the key does not belong to the table" );
+
+		if ( i.hasNext() ) throw new AssertionError( "Forward iterator not exhausted" );
+
+		n = size();
+		if ( n > 0 ) {
+			i = keySet().iterator( LAST_KEY() );
+			while( n-- != 0 ) 
+				if ( ! containsKey( k = i.PREV_KEY() ) ) 
+					throw new AssertionError( "Linked hash table backward enumerates key " + k + ", but the key does not belong to the table" );
+			
+			if ( i.hasPrevious() ) throw new AssertionError( "Previous iterator not exhausted" );
+		}
+#endif
+	}
+#else
+	private void checkTable() {}
+#endif
+
+
+
+#ifdef TEST
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif !#keyclass(Reference)
+#ifdef Custom
+		int i = r.nextInt( 3 );
+		byte a[] = new byte[ i ];
+		while( i-- != 0 ) a[ i ] = (byte)r.nextInt();
+		return a;
+#else
+		return Integer.toBinaryString( r.nextInt() );
+#endif
+#else
+		return new java.io.Serializable() {};
+#endif
+	}
+
+	private static VALUE_TYPE genValue() {
+#if #valueclass(Byte) || #valueclass(Short) || #valueclass(Character)
+		return (VALUE_TYPE)(r.nextInt());
+#elif #values(primitive)
+		return r.NEXT_VALUE();
+#elif !#valueclass(Reference)
+		return Integer.toBinaryString( r.nextInt() );
+#else
+		return new java.io.Serializable() {};
+#endif
+	}
+
+	private static final class ArrayComparator implements java.util.Comparator {
+		public int compare( Object a, Object b ) {
+			byte[] aa = (byte[])a;
+			byte[] bb = (byte[])b;
+			int length = Math.min( aa.length, bb.length );
+			for( int i = 0; i < length; i++ ) {
+				if ( aa[ i ] < bb[ i ] ) return -1;
+				if ( aa[ i ] > bb[ i ] ) return 1;
+			}
+			return aa.length == bb.length ? 0 : ( aa.length < bb.length ? -1 : 1 );
+		}
+	}
+
+	private static final class MockMap extends java.util.TreeMap {
+		private java.util.List list = new java.util.ArrayList();
+
+		public MockMap( java.util.Comparator c ) { super( c ); }
+
+		public Object put( Object k, Object v ) {
+			if ( ! containsKey( k ) ) list.add( k );
+			return super.put( k, v );
+		}
+
+		public void putAll( Map m ) {
+			java.util.Iterator i = m.entrySet().iterator();
+			while( i.hasNext() ) {
+				Map.Entry e = (Map.Entry)i.next();
+				put( e.getKey(), e.getValue() );
+			}
+		}
+
+		public Object remove( Object k ) {
+			if ( containsKey( k ) ) {
+				int i = list.size();
+				while( i-- != 0 ) if ( comparator().compare( list.get( i ), k ) == 0 ) {
+					list.remove( i );
+					break;
+				}
+			}
+			return super.remove( k );
+		}
+
+		private void justRemove( Object k ) { super.remove( k ); }
+		private java.util.Set justEntrySet() { return super.entrySet(); }
+		private java.util.Set justKeySet() { return super.keySet(); }
+
+		public java.util.Set keySet() {
+			return new java.util.AbstractSet() {
+					final java.util.Set keySet = justKeySet();
+					
+					public boolean contains( Object k ) { return keySet.contains( k ); }
+					public int size() { return keySet.size(); }
+					public java.util.Iterator iterator() {
+						return new java.util.Iterator() {
+								final java.util.Iterator iterator = list.iterator();
+								Object curr;
+								public Object next() { return curr = iterator.next(); }
+								public boolean hasNext() { return iterator.hasNext(); }
+								public void remove() { 
+									justRemove( curr );
+									iterator.remove(); 
+								}
+							};
+
+					}
+				};
+
+		}
+
+		public java.util.Set entrySet() {
+			return new java.util.AbstractSet() {
+					final java.util.Set entrySet = justEntrySet();
+					
+					public boolean contains( Object k ) { return entrySet.contains( k ); }
+					public int size() { return entrySet.size(); }
+					public java.util.Iterator iterator() {
+						return new java.util.Iterator() {
+								final java.util.Iterator iterator = list.iterator();
+								Object curr;
+								public Object next() { 
+									curr = iterator.next();
+#if #valueclass(Reference)
+#if #keyclass(Reference)
+									return new ABSTRACT_MAP.BasicEntry( (Object)curr, (Object)get(curr) ) {
+#else
+									return new ABSTRACT_MAP.BasicEntry( (KEY_CLASS)curr, (Object)get(curr) ) {
+#endif
+#else
+#if #keyclass(Reference)
+									return new ABSTRACT_MAP.BasicEntry( (Object)curr, (VALUE_CLASS)get(curr) ) {
+#else
+									return new ABSTRACT_MAP.BasicEntry( (KEY_CLASS)curr, (VALUE_CLASS)get(curr) ) {
+#endif
+#endif
+											public VALUE_TYPE setValue( VALUE_TYPE v ) {
+												return VALUE_OBJ2TYPE(put( getKey(), VALUE2OBJ(v) ));
+											}
+										}; 
+								}
+								public boolean hasNext() { return iterator.hasNext(); }
+								public void remove() { 
+									justRemove( ((Map.Entry)curr).getKey() );
+									iterator.remove(); 
+								}
+							};
+
+					}
+				};
+
+		}
+
+	}
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition fp = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, fp ).toString();
+	}
+
+
+	private static void speedTest( int n, float f, boolean comp ) {
+#ifndef Custom
+		int i, j;
+		OPEN_DOUBLE_HASH_MAP m;
+#ifdef Linked
+		java.util.LinkedHashMap t;
+#else
+		java.util.HashMap t;
+#endif
+		KEY_TYPE k[] = new KEY_TYPE[n];
+		KEY_TYPE nk[] = new KEY_TYPE[n];
+		VALUE_TYPE v[] = new VALUE_TYPE[n];
+		long ms;
+
+		for( i = 0; i < n; i++ ) {
+			k[i] = genKey();
+			nk[i] = genKey();
+			v[i] = genValue();
+		}
+
+		double totPut = 0, totYes = 0, totNo = 0, totIter = 0, totRemYes = 0, totRemNo = 0, d;
+
+		if ( comp ) { for( j = 0; j < 20; j++ ) {
+
+#ifdef Linked
+			t = new java.util.LinkedHashMap( 16 );
+#else
+			t = new java.util.HashMap( 16 );
+#endif
+
+			/* We put pairs to t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totPut += d; 				
+			System.out.print("Put: " + format( d ) +" K/s " );
+
+			/* We check for pairs in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.containsKey( KEY2OBJ( k[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totYes += d; 				
+			System.out.print("Yes: " + format( d ) +" K/s " );
+
+			/* We check for pairs not in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.containsKey( KEY2OBJ( nk[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totNo += d; 				
+			System.out.print("No: " + format( d ) +" K/s " );
+
+			/* We iterate on t. */
+			ms = System.currentTimeMillis();
+			for( java.util.Iterator it = t.entrySet().iterator(); it.hasNext(); it.next() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIter += d; 				
+			System.out.print("Iter: " + format( d ) +" K/s " );
+				
+			/* We delete pairs not in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.remove( KEY2OBJ( nk[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemNo += d; 				
+			System.out.print("RemNo: " + format( d ) +" K/s " );
+				
+			/* We delete pairs in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.remove( KEY2OBJ( k[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemYes += d; 				
+			System.out.print("RemYes: " + format( d ) +" K/s " );
+				
+			System.out.println();
+		}
+
+		System.out.println();
+		System.out.println( "java.util Put: " + format( totPut/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s Iter: " + format( totIter/(j-3) ) + " K/s RemNo: " + format( totRemNo/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + "K/s" );
+
+		System.out.println();
+
+		totPut = totYes = totNo = totIter = totRemYes = totRemNo = 0;
+
+		}
+
+		for( j = 0; j < 20; j++ ) {
+
+			m = new OPEN_DOUBLE_HASH_MAP( 16, f );
+
+			/* We put pairs to m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.put( k[i], v[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totPut += d; 				
+			System.out.print("Put: " + format( d ) +" K/s " );
+
+			/* We check for pairs in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.containsKey( k[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totYes += d; 				
+			System.out.print("Yes: " + format( d ) +" K/s " );
+
+			/* We check for pairs not in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.containsKey( nk[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totNo += d; 				
+			System.out.print("No: " + format( d ) +" K/s " );
+
+			/* We iterate on m. */
+			ms = System.currentTimeMillis();
+			for( java.util.Iterator it = m.entrySet().iterator(); it.hasNext(); it.next() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIter += d; 	 
+			System.out.print("Iter: " + format( d ) +" K/s " );
+
+			/* We delete pairs not in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.remove( nk[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemNo += d; 	
+			System.out.print("RemNo: " + format( d ) +" K/s " );
+
+			/* We delete pairs in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.remove( k[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemYes += d; 				
+			System.out.print("RemYes: " + format( d ) +" K/s " );	 
+
+			System.out.println();
+		}
+
+
+		System.out.println();
+		System.out.println( "fastutil  Put: " + format( totPut/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s Iter: " + format( totIter/(j-3) ) + " K/s RemNo: " + format( totRemNo/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s" );
+
+		System.out.println();
+#endif
+	}
+
+	private static boolean valEquals(Object o1, Object o2) {
+		return o1 == null ? o2 == null : o1.equals(o2);
+	}
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	protected static void test( int n, float f ) {
+#ifdef Custom
+		OPEN_DOUBLE_HASH_MAP m = new OPEN_DOUBLE_HASH_MAP(Hash.DEFAULT_INITIAL_SIZE, f, it.unimi.dsi.fastutil.bytes.ByteArrays.HASH_STRATEGY);
+#else
+		OPEN_DOUBLE_HASH_MAP m = new OPEN_DOUBLE_HASH_MAP(Hash.DEFAULT_INITIAL_SIZE, f);
+#endif
+
+#ifdef Linked
+#ifdef Custom
+		Map t = new MockMap( new ArrayComparator() );
+#else
+		Map t = new java.util.LinkedHashMap();
+#endif
+#else
+#ifdef Custom
+		Map t = new java.util.TreeMap(new ArrayComparator());
+#else
+		Map t = new java.util.HashMap();
+#endif
+#endif
+
+		/* First of all, we fill t with random data. */
+
+		for(int i=0; i<n;  i++ ) t.put( KEY2OBJ(genKey()), VALUE2OBJ(genValue()) );
+		  
+		/* Now we add to m the same data */
+		  
+		m.putAll(t);
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after insertion");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after insertion");
+
+		/* Now we check that m actually holds that data. */
+		  
+		for(java.util.Iterator i=t.entrySet().iterator(); i.hasNext();  ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			if (!valEquals(e.getValue(), m.get(e.getKey()))) 
+				System.out.println("Error (" + seed + "): m and t differ on an entry ("+e+") after insertion (iterating on t)");
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.entrySet().iterator(); i.hasNext();  ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			if (!valEquals(e.getValue(), t.get(e.getKey()))) 
+				System.out.println("Error (" + seed + "): m and t differ on an entry ("+e+") after insertion (iterating on m)");
+		}
+
+		/* Now we check that m actually holds the same keys. */
+		  
+		for(java.util.Iterator i=t.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			if (!m.containsKey(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+o+") after insertion (iterating on t)");
+				System.exit( 1 );
+			}
+			if (!m.keySet().contains(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+o+", in keySet()) after insertion (iterating on t)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that m actually holds the same keys, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			if (!t.containsKey(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key after insertion (iterating on m)");
+				System.exit( 1 );
+			}
+			if (!t.keySet().contains(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key (in keySet()) after insertion (iterating on m)");
+				System.exit( 1 );
+			}
+		}
+
+
+		/* Now we check that m actually holds the same values. */
+		  
+		for(java.util.Iterator i=t.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			if (!m.containsValue(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a value after insertion (iterating on t)");
+				System.exit( 1 );
+			}
+			if (!m.values().contains(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a value (in values()) after insertion (iterating on t)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that m actually holds the same values, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			if (!t.containsValue(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a value after insertion (iterating on m)");
+				System.exit( 1 );
+			}
+			if (!t.values().contains(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a value (in values()) after insertion (iterating on m)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that inquiries about random data give the same answer in m and t. For
+		   m we use the polymorphic method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+			if (m.containsKey(KEY2OBJ(T)) != t.containsKey(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence in keys between t and m (polymorphic method)");
+				System.exit( 1 );
+			}
+
+#if ( #keys(reference) ) && ! ( #values(reference) )
+			if ((m.GET_VALUE(T) != VALUE_NULL) != ((t.get(KEY2OBJ(T)) == null ? VALUE_NULL : VALUE_OBJ2TYPE(t.get(KEY2OBJ(T)))) != VALUE_NULL) || 
+				t.get(KEY2OBJ(T)) != null && 
+				! VALUE2OBJ(m.GET_VALUE(T)).equals(t.get(KEY2OBJ(T)))) 
+#else
+				if ((m.get(T) != VALUE_NULL) != ((t.get(KEY2OBJ(T)) == null ? VALUE_NULL : VALUE_OBJ2TYPE(t.get(KEY2OBJ(T)))) != VALUE_NULL) || 
+					t.get(KEY2OBJ(T)) != null && 
+					! m.get(KEY2OBJ(T)).equals(t.get(KEY2OBJ(T)))) 
+#endif
+					{
+						System.out.println("Error (" + seed + "): divergence between t and m (polymorphic method)");
+						System.exit( 1 );
+					}
+		}
+
+		/* Again, we check that inquiries about random data give the same answer in m and t, but
+		   for m we use the standard method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+			if (!valEquals(m.get(KEY2OBJ(T)), t.get(KEY2OBJ(T)))) {
+				System.out.println("Error (" + seed + "): divergence between t and m (standard method)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+			VALUE_TYPE U = genValue();
+			if (!valEquals(m.put(KEY2OBJ(T), VALUE2OBJ(U)), t.put(KEY2OBJ(T), VALUE2OBJ(U)))) {
+				System.out.println("Error (" + seed + "): divergence in put() between t and m");
+				System.exit( 1 );
+			}
+			T = genKey();
+			if (!valEquals(m.remove(KEY2OBJ(T)), t.remove(KEY2OBJ(T)))) {
+				System.out.println("Error (" + seed + "): divergence in remove() between t and m");
+				System.exit( 1 );
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after removal");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after removal");
+
+
+		/* Now we check that m actually holds the same data. */
+		  
+		for(java.util.Iterator i=t.entrySet().iterator(); i.hasNext();  ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			if (!valEquals(e.getValue(), m.get(e.getKey()))) {
+				System.out.println("Error (" + seed + "): m and t differ on an entry ("+e+") after removal (iterating on t)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.entrySet().iterator(); i.hasNext();  ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			if (!valEquals(e.getValue(), t.get(e.getKey()))) {
+				System.out.println("Error (" + seed + "): m and t differ on an entry ("+e+") after removal (iterating on m)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that m actually holds the same keys. */
+		  
+		for(java.util.Iterator i=t.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			if (!m.containsKey(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+o+") after removal (iterating on t)");
+				System.exit( 1 );
+			}
+			if (!m.keySet().contains(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+o+", in keySet()) after removal (iterating on t)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that m actually holds the same keys, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			if (!t.containsKey(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key after removal (iterating on m)");
+				System.exit( 1 );
+			}
+			if (!t.keySet().contains(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key (in keySet()) after removal (iterating on m)");
+				System.exit( 1 );
+			}
+		}
+
+
+		/* Now we check that m actually holds the same values. */
+		  
+		for(java.util.Iterator i=t.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			if (!m.containsValue(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a value after removal (iterating on t)");
+				System.exit( 1 );
+			}
+			if (!m.values().contains(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a value (in values()) after removal (iterating on t)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that m actually holds the same values, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			if (!t.containsValue(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a value after removal (iterating on m)");
+				System.exit( 1 );
+			}
+			if (!t.values().contains(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a value (in values()) after removal (iterating on m)");
+				System.exit( 1 );
+			}
+		}
+
+
+		int h = m.hashCode();
+
+
+		/* Now we save and read m. */
+
+		try {
+			java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test");
+			java.io.OutputStream os = new java.io.FileOutputStream(ff);
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os);
+				
+			oos.writeObject(m);
+			oos.close();
+				
+			java.io.InputStream is = new java.io.FileInputStream(ff);
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is);
+				
+			m = (OPEN_DOUBLE_HASH_MAP)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch(Exception e) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+
+
+#if !#keyclass(Reference) && !#valueclass(Reference)
+		if (m.hashCode() != h) System.out.println("Error (" + seed + "): hashCode() changed after save/read");
+
+		/* Now we check that m actually holds that data. */
+		  
+		for(java.util.Iterator i=t.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			if (!valEquals(m.get(o),t.get(o))) {
+				System.out.println("Error (" + seed + "): m and t differ on an entry after save/read");
+				System.exit( 1 );
+			}
+		}
+#else
+		m.clear();
+		m.putAll( t );
+#endif
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+			VALUE_TYPE U = genValue();
+			if (!valEquals(m.put(KEY2OBJ(T), VALUE2OBJ(U)), t.put(KEY2OBJ(T), VALUE2OBJ(U)))) {
+				System.out.println("Error (" + seed + "): divergence in put() between t and m after save/read");
+				System.exit( 1 );
+			}
+			T = genKey();
+			if (!valEquals(m.remove(KEY2OBJ(T)), t.remove(KEY2OBJ(T)))) {
+				System.out.println("Error (" + seed + "): divergence in remove() between t and m after save/read");
+				System.exit( 1 );
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after post-save/read removal");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after post-save/read removal");
+
+
+
+#ifdef Linked
+
+
+		/* Now we play with iterators. */
+
+		{
+			java.util.ListIterator i, j;
+			Object J;
+			Map.Entry E, F;
+			i = (java.util.ListIterator)m.entrySet().iterator(); 
+			j = new java.util.LinkedList( t.entrySet() ).listIterator(); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + seed + "): divergence in hasNext()" );
+				ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + seed + "): divergence in hasPrevious()" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+#ifdef Custom
+					ensure( m.strategy().equals( (E=(java.util.Map.Entry)i.next()).getKey(),  J = (F=(Map.Entry)j.next()).getKey() ), "Error (" + seed + "): divergence in next()" );
+#else
+					ensure( (E=(java.util.Map.Entry)i.next()).getKey().equals( J = (F=(Map.Entry)j.next()).getKey() ), "Error (" + seed + "): divergence in next()" );
+#endif
+
+					if ( r.nextFloat() < 0.3 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+					else if ( r.nextFloat() < 0.3 ) {
+						Object U = VALUE2OBJ(genValue());
+						E.setValue( U );
+                        t.put( F.getKey(), U );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+#ifdef Custom
+					ensure( m.strategy().equals( (E=(java.util.Map.Entry)i.previous()).getKey(), J = (F=(Map.Entry)j.previous()).getKey() ), "Error (" + seed + "): divergence in previous()" );
+#else
+					ensure( (E=(java.util.Map.Entry)i.previous()).getKey().equals( J = (F=(Map.Entry)j.previous()).getKey() ), "Error (" + seed + "): divergence in previous()" );
+#endif
+
+					if ( r.nextFloat() < 0.3 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+					else if ( r.nextFloat() < 0.3 ) {
+						Object U = VALUE2OBJ(genValue());
+						E.setValue( U );
+                        t.put( F.getKey(), U );
+					}
+				}
+
+				ensure( i.nextIndex() == j.nextIndex(), "Error (" + seed + "): divergence in nextIndex()" );
+				ensure( i.previousIndex() == j.previousIndex(), "Error (" + seed + "): divergence in previousIndex()" );
+
+			}
+
+		}
+		  
+		if ( t.size() > 0 ) {
+			java.util.ListIterator i, j;
+			Object J;
+			j = new java.util.LinkedList( t.keySet() ).listIterator();
+			int e = r.nextInt( t.size() );
+			Object from;
+			do from = j.next(); while( e-- != 0 );
+
+			i = (java.util.ListIterator)((SORTED_SET)m.keySet()).iterator( KEY_OBJ2TYPE( from ) ); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" );
+				ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+#ifdef Custom
+					ensure( m.strategy().equals( i.next(), J = j.next() ), "Error (" + seed + "): divergence in next() (iterator with starting point " + from + ")" );
+#else
+					ensure( i.next().equals( J = j.next() ), "Error (" + seed + "): divergence in next() (iterator with starting point " + from + ")" );
+#endif
+
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+#ifdef Custom
+					ensure( m.strategy().equals( i.previous(), J = j.previous() ), "Error (" + seed + "): divergence in previous() (iterator with starting point " + from + ")" );
+#else
+					ensure( i.previous().equals( J = j.previous() ), "Error (" + seed + "): divergence in previous() (iterator with starting point " + from + ")" );
+#endif
+
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+
+				ensure( i.nextIndex() == j.nextIndex(), "Error (" + seed + "): divergence in nextIndex() (iterator with starting point " + from + ")" );
+				ensure( i.previousIndex() == j.previousIndex(), "Error (" + seed + "): divergence in previousIndex() (iterator with starting point " + from + ")" );
+
+			}
+
+		}
+		  
+
+
+		/* Now we check that m actually holds that data. */
+		  
+		ensure( m.equals(t), "Error (" + seed + "): ! m.equals( t ) after iteration" );
+		ensure( t.equals(m), "Error (" + seed + "): ! t.equals( m ) after iteration" );
+
+#endif
+
+
+		/* Now we take out of m everything, and check that it is empty. */
+
+		for(java.util.Iterator i=t.keySet().iterator(); i.hasNext(); ) m.remove(i.next()); 
+
+		if (!m.isEmpty())  {
+			System.out.println("Error (" + seed + "): m is not empty (as it should be)");
+			System.exit( 1 );
+		}
+
+#if (#keyclass(Integer) || #keyclass(Long)) && (#valueclass(Integer) || #valueclass(Long))
+		m = new OPEN_DOUBLE_HASH_MAP(n, f);
+		t.clear();
+		int x;
+
+		/* Now we torture-test the hash table. This part is implemented only for integers and longs. */
+
+		int p = m.state.length;
+
+		for(int i=0; i<p; i++) {
+			for (int j=0; j<20; j++) {
+				m.put(i+(r.nextInt() % 10)*p, 1);
+				m.remove(i+(r.nextInt() % 10)*p);
+			}
+
+			for (int j=-10; j<10; j++) m.remove(i+j*p);
+		}
+		  
+		t.putAll(m);
+
+		/* Now all table entries are REMOVED. */
+
+		for(int i=0; i<(p*f)/10; i++) {
+			for (int j=0; j<10; j++) {
+				if (!valEquals(m.put(KEY2OBJ(x = i+(r.nextInt() % 10)*p), VALUE2OBJ(1)), t.put(KEY2OBJ(x), VALUE2OBJ(1))))
+					System.out.println("Error (" + seed + "): m and t differ on an entry during torture-test insertion.");
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after torture-test insertion");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after torture-test insertion");
+
+		for(int i=0; i<p/10; i++) {
+			for (int j=0; j<10; j++) {
+				if (!valEquals(m.remove(KEY2OBJ(x = i+(r.nextInt() % 10)*p)), t.remove(KEY2OBJ(x))))
+					System.out.println("Error (" + seed + "): m and t differ on an entry during torture-test removal.");
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after torture-test removal");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after torture-test removal");
+
+		if (!m.equals(m.clone())) System.out.println("Error (" + seed + "): !m.equals(m.clone()) after torture-test removal");
+		if (!((OPEN_DOUBLE_HASH_MAP)m.clone()).equals(m)) System.out.println("Error (" + seed + "): !m.clone().equals(m) after torture-test removal");
+
+		m.rehash();
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after rehash()");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after rehash()");
+
+		m.trim();
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after trim()");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after trim()");
+#endif
+
+		System.out.println("Test OK");
+		return;
+	}
+
+
+	public static void main( String args[] ) {
+		float f = Hash.DEFAULT_LOAD_FACTOR;
+		int n  = Integer.parseInt(args[1]);
+		if (args.length>2) f = Float.parseFloat(args[2]);
+		if ( args.length > 3 ) r = new java.util.Random( seed = Long.parseLong( args[ 3 ] ) );
+		  
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, f, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n, f);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+			
+	}
+
+#endif
+
+}
diff --git a/drv/OpenDoubleHashSet.drv b/drv/OpenDoubleHashSet.drv
new file mode 100644
index 0000000..2f88c6c
--- /dev/null
+++ b/drv/OpenDoubleHashSet.drv
@@ -0,0 +1,1986 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.Hash;
+import it.unimi.dsi.fastutil.HashCommon;
+import it.unimi.dsi.fastutil.bytes.ByteArrays;
+
+import java.util.Arrays;
+import java.util.Collection;
+#if #keys(primitive)
+import java.util.Iterator;
+#endif
+import java.util.NoSuchElementException;
+
+#ifdef Linked
+#if #keys(reference)
+import java.util.Comparator;
+#endif
+
+/**  A type-specific linked hash set with a fast, small-footprint implementation.
+ *
+ * <P>Instances of this class use a hash table to represent a set. The table is
+ * enlarged as needed when new entries are created, but it is <em>never</em> made
+ * smaller (even on a {@link #clear()}). A family of {@linkplain #trim() trimming
+ * methods} lets you control the size of the table; this is particularly useful
+ * if you reuse instances of this class.
+ *
+ * <P>The enlargement speed is controlled by the <em>growth factor</em>, a
+ * positive number. If the growth factor is <var>p</var>, then the table is
+ * enlarged each time roughly by a factor 2<sup>p/16</sup>. By default, <var>p</var> is
+ * {@link Hash#DEFAULT_GROWTH_FACTOR}, which means that the table is doubled at
+ * each enlargement, but one can easily set more or less aggressive policies by
+ * calling {@link #growthFactor(int)} (note that the growth factor is <em>not</em> serialized:
+ * deserialized tables get the {@linkplain Hash#DEFAULT_GROWTH_FACTOR default growth factor}).
+ *
+ * <P>Iterators created by this set will enumerate elements in the same order in which they
+ * have been added to the set (note that addition of elements already present 
+ * in the set does not change the iteration order). Note that this order has nothing in common with the natural
+ * order of the keys.
+ *
+ * <P>This class implements the interface of a sorted set, so as to allow easy
+ * access to the iteration order: for instance, you can get the first element
+ * in iteration order with {@link #first()} without having to create an
+ * iterator; however, this class partially violates the {@link java.util.SortedSet}
+ * contract because all subset methods throw an exception and {@link
+ * #comparator()} always returns <code>null</code>.
+ *
+ * <P>The iterators provided by this class are type-specific {@linkplain
+ * java.util.ListIterator list iterators}.  However, creation of an iterator
+ * using a starting point is going to be very expensive, as the chosen starting
+ * point must be linearly searched for, unless it is {@link #last()}, in which
+ * case the iterator is created in constant time.
+ *
+ * <P>Note that deletions in a linked table require scanning the list until the
+ * element to be removed is found. The only exceptions are the first element, the last element,
+ * and deletions performed using an iterator.
+ *
+ * @see Hash
+ * @see HashCommon
+ */
+
+public class OPEN_DOUBLE_HASH_SET KEY_GENERIC extends ABSTRACT_SORTED_SET KEY_GENERIC implements java.io.Serializable, Cloneable, Hash {
+
+#else
+
+#ifdef Custom
+
+/**  A hash set with a fast, small-footprint implementation whose {@linkplain it.unimi.dsi.fastutil.Hash.Strategy hashing strategy}
+ * is specified at creation time.
+ *
+ * <P>Instances of this class use a hash table to represent a set. The table is
+ * enlarged as needed when new entries are created, but it is <em>never</em> made
+ * smaller (even on a {@link #clear()}). A family of {@linkplain #trim() trimming
+ * methods} lets you control the size of the table; this is particularly useful
+ * if you reuse instances of this class.
+ *
+ * <P>The enlargement speed is controlled by the <em>growth factor</em>, a
+ * positive number. If the growth factor is <var>p</var>, then the table is
+ * enlarged each time roughly by a factor 2<sup>p/16</sup>. By default, <var>p</var> is
+ * {@link Hash#DEFAULT_GROWTH_FACTOR}, which means that the table is doubled at
+ * each enlargement, but one can easily set more or less aggressive policies by
+ * calling {@link #growthFactor(int)} (note that the growth factor is <em>not</em> serialized:
+ * deserialized tables get the {@linkplain Hash#DEFAULT_GROWTH_FACTOR default growth factor}).
+ *
+ *
+ * @see Hash
+ * @see HashCommon
+ */
+
+public class OPEN_DOUBLE_HASH_SET KEY_GENERIC extends ABSTRACT_SET KEY_GENERIC implements java.io.Serializable, Cloneable, Hash {
+
+#else
+
+/**  A type-specific hash set with a fast, small-footprint implementation.
+ *
+ * <P>Instances of this class use a hash table to represent a set. The table is
+ * enlarged as needed when new entries are created, but it is <em>never</em> made
+ * smaller (even on a {@link #clear()}). A family of {@linkplain #trim() trimming
+ * methods} lets you control the size of the table; this is particularly useful
+ * if you reuse instances of this class.
+ *
+ * <P>The enlargement speed is controlled by the <em>growth factor</em>, a
+ * positive number. If the growth factor is <var>p</var>, then the table is
+ * enlarged each time roughly by a factor 2<sup>p/16</sup>. By default, <var>p</var> is
+ * {@link Hash#DEFAULT_GROWTH_FACTOR}, which means that the table is doubled at
+ * each enlargement, but one can easily set more or less aggressive policies by
+ * calling {@link #growthFactor(int)} (note that the growth factor is <em>not</em> serialized:
+ * deserialized tables get the {@linkplain Hash#DEFAULT_GROWTH_FACTOR default growth factor}).
+ *
+ *
+ * @see Hash
+ * @see HashCommon
+ */
+
+public class OPEN_DOUBLE_HASH_SET KEY_GENERIC extends ABSTRACT_SET KEY_GENERIC implements java.io.Serializable, Cloneable, Hash {
+
+#endif
+
+#endif
+
+	/** The array of keys. */
+	protected transient KEY_GENERIC_TYPE key[];
+	 
+	/** The array of occupancy states. */
+	protected transient byte state[];
+
+	/** The acceptable load factor. */
+	protected final float f;
+	 
+	/** Index into the prime list, giving the current table size. */
+	protected transient int p;
+
+	/** Threshold after which we rehash. It must be the table size times {@link #f}. */
+	protected transient int maxFill;
+
+	/** Number of free entries in the table (may be less than the table size - {@link #count} because of deleted entries). */
+	protected transient int free;
+
+	/** Number of entries in the set. */
+	protected int count;
+
+	/** The growth factor of the table. The next table size will be <code>{@link Hash#PRIMES}[{@link #p}+growthFactor]</code>. */
+	protected transient int growthFactor = Hash.DEFAULT_GROWTH_FACTOR;
+
+#ifdef Linked
+	/** The index of the first entry in iteration order. It is valid iff {@link #count} is nonzero; otherwise, it contains -1. */
+	protected transient int first = -1;
+	/** The index of the last entry in iteration order. It is valid iff {@link #count} is nonzero; otherwise, it contains -1. */
+	protected transient int last = -1;
+	/** For each entry, the next and the previous entry in iteration order
+	exclusive-or'd together. It is valid only on {@link Hash#OCCUPIED}
+	entries. The first and the last entry contain the actual successor and
+	predecessor, respectively, exclusive-or'd with -1. */
+	protected transient int link[];
+#endif
+
+#ifdef Custom
+	/** The hash strategy of this custom set. */
+	protected Strategy<K> strategy;
+#endif
+
+    private static final long serialVersionUID = -7046029254386353129L;
+
+	private static final boolean ASSERTS = ASSERTS_VALUE;
+
+#ifdef Custom
+	/** Creates a new hash set.
+	 *
+	 * The actual table size is the least available prime greater than <code>n</code>/<code>f</code>.
+	 *
+	 * @param n the expected number of elements in the hash set. 
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 * @see Hash#PRIMES
+	 */
+	@SuppressWarnings("unchecked")
+	public OPEN_DOUBLE_HASH_SET( final int n, final float f, final Strategy<K> strategy ) {
+		this.strategy = strategy;
+#else
+	/** Creates a new hash set.
+	 *
+	 * The actual table size is the least available prime greater than <code>n</code>/<code>f</code>.
+	 *
+	 * @param n the expected number of elements in the hash set. 
+	 * @param f the load factor.
+	 * @see Hash#PRIMES
+	 */
+	@SuppressWarnings("unchecked")
+	public OPEN_DOUBLE_HASH_SET( final int n, final float f ) {
+#endif
+		if ( f <= 0 || f > 1 ) throw new IllegalArgumentException( "Load factor must be greater than 0 and smaller than or equal to 1" );
+		if ( n < 0 ) throw new IllegalArgumentException( "Hash table size must be nonnegative" );
+
+		int l = Arrays.binarySearch( PRIMES, (int)( n / f ) + 1 );
+		if ( l < 0 ) l = -l - 1;
+
+		free = PRIMES[ p = l ];
+		this.f = f;
+		this.maxFill = (int)( free * f );
+		key = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ free ];
+		state = new byte[ free ];
+#ifdef Linked
+		link = new int[ free ];
+#endif
+	}
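+
+	/* A worked example of the sizing above: with n = 100 and f = 0.75f the binary search
+	 * looks for (int)( 100 / 0.75f ) + 1 = 134, so the table gets the smallest size in
+	 * PRIMES that is at least 134; free starts out equal to the whole table size, and
+	 * maxFill is three quarters of it, which is the fill level at which add() will
+	 * trigger a rehash to the next, larger prime.
+	 */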
+	 
+	 
+#ifdef Custom
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
+	 *
+	 * @param n the expected number of elements in the hash set. 
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final int n, final Strategy<K> strategy ) {
+		this( n, DEFAULT_LOAD_FACTOR, strategy );
+	}
+
+#else
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
+	 *
+	 * @param n the expected number of elements in the hash set. 
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final int n ) {
+		this( n, DEFAULT_LOAD_FACTOR );
+	}
+
+#endif
+
+
+#ifdef Custom
+	/** Creates a new hash set with {@link Hash#DEFAULT_INITIAL_SIZE} elements
+	 * and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final Strategy<K> strategy ) {
+	this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash set with {@link Hash#DEFAULT_INITIAL_SIZE} elements
+	 * and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET() {
+		this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR );
+	} 
+#endif
+
+
+#ifdef Custom
+	/** Creates a new hash set copying a given collection.
+	 *
+	 * @param c a {@link Collection} to be copied into the new hash set. 
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final Collection<? extends KEY_GENERIC_CLASS> c, final float f, final Strategy<K> strategy ) {
+	this( c.size(), f, strategy );
+		addAll( c );
+	}
+#else
+	/** Creates a new hash set copying a given collection.
+	 *
+	 * @param c a {@link Collection} to be copied into the new hash set. 
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final Collection<? extends KEY_GENERIC_CLASS> c, final float f ) {
+		this( c.size(), f );
+		addAll( c );
+	}
+#endif
+
+
+
+#ifdef Custom
+	/** Creates a new hash set  with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor 
+	 * copying a given collection.
+	 *
+	 * @param c a {@link Collection} to be copied into the new hash set. 
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final Collection<? extends KEY_GENERIC_CLASS> c, final Strategy<K> strategy ) {
+		this( c, DEFAULT_LOAD_FACTOR, strategy );
+	}
+
+#else
+	/** Creates a new hash set  with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor 
+	 * copying a given collection.
+	 *
+	 * @param c a {@link Collection} to be copied into the new hash set. 
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final Collection<? extends KEY_GENERIC_CLASS> c ) {
+		this( c, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+
+#ifdef Custom
+	/** Creates a new hash set copying a given type-specific collection.
+	 *
+	 * @param c a type-specific collection to be copied into the new hash set. 
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final COLLECTION KEY_EXTENDS_GENERIC c, final float f, Strategy<K> strategy ) {
+	this( c.size(), f, strategy );
+		addAll( c );
+	}
+#else
+	/** Creates a new hash set copying a given type-specific collection.
+	 *
+	 * @param c a type-specific collection to be copied into the new hash set. 
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final COLLECTION KEY_EXTENDS_GENERIC c, final float f ) {
+		this( c.size(), f );
+		addAll( c );
+	}
+#endif
+
+
+#ifdef Custom
+	/** Creates a new hash set  with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor 
+	 * copying a given type-specific collection.
+	 *
+	 * @param c a type-specific collection to be copied into the new hash set. 
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final COLLECTION KEY_EXTENDS_GENERIC c, final Strategy<K> strategy ) {
+		this( c, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash set  with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor 
+	 * copying a given type-specific collection.
+	 *
+	 * @param c a type-specific collection to be copied into the new hash set. 
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final COLLECTION KEY_EXTENDS_GENERIC c ) {
+		this( c, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+
+#ifdef Custom
+	/** Creates a new hash set using elements provided by a type-specific iterator.
+	 *
+	 * @param i a type-specific iterator whose elements will fill the set.
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final KEY_ITERATOR KEY_GENERIC i, final float f, final Strategy<K> strategy ) {
+		this( DEFAULT_INITIAL_SIZE, f, strategy );
+		while( i.hasNext() ) add( i.NEXT_KEY() );
+	}
+#else
+	/** Creates a new hash set using elements provided by a type-specific iterator.
+	 *
+	 * @param i a type-specific iterator whose elements will fill the set.
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final KEY_ITERATOR KEY_GENERIC i, final float f ) {
+		this( DEFAULT_INITIAL_SIZE, f );
+		while( i.hasNext() ) add( i.NEXT_KEY() );
+	}
+#endif
+
+#ifdef Custom
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by a type-specific iterator.
+	 *
+	 * @param i a type-specific iterator whose elements will fill the set.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final KEY_ITERATOR KEY_GENERIC i, final Strategy<K> strategy ) {
+		this( i, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by a type-specific iterator.
+	 *
+	 * @param i a type-specific iterator whose elements will fill the set.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final KEY_ITERATOR KEY_GENERIC i ) {
+		this( i, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+
+
+#if #keys(primitive)
+
+#ifdef Custom
+	/** Creates a new hash set using elements provided by an iterator.
+	 *
+	 * @param i an iterator whose elements will fill the set.
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final Iterator<?> i, final float f, final Strategy<K> strategy ) {
+		this( ITERATORS.AS_KEY_ITERATOR( i ), f, strategy );
+	}
+#else
+	/** Creates a new hash set using elements provided by an iterator.
+	 *
+	 * @param i an iterator whose elements will fill the set.
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final Iterator<?> i, final float f ) {
+		this( ITERATORS.AS_KEY_ITERATOR( i ), f );
+	}
+#endif
+
+#ifdef Custom
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by an iterator.
+	 *
+	 * @param i an iterator whose elements will fill the set.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final Iterator<?> i, final Strategy<K> strategy ) {
+		this( ITERATORS.AS_KEY_ITERATOR( i ), strategy );
+	}
+#else
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by an iterator.
+	 *
+	 * @param i an iterator whose elements will fill the set.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final Iterator<?> i ) {
+		this( ITERATORS.AS_KEY_ITERATOR( i ) );
+	}
+#endif
+
+#endif
+
+
+
+#ifdef Custom
+	/** Creates a new hash set and fills it with the elements of a given array.
+	 *
+	 * @param a an array whose elements will be used to fill the set.
+	 * @param offset the first element to use.
+	 * @param length the number of elements to use.
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length, final float f, final Strategy<K> strategy ) {
+	this( length < 0 ? 0 : length, f, strategy );
+		ARRAYS.ensureOffsetLength( a, offset, length );
+		for( int i = 0; i < length; i++ ) add( a[ offset + i ] );
+	}
+#else
+	/** Creates a new hash set and fills it with the elements of a given array.
+	 *
+	 * @param a an array whose elements will be used to fill the set.
+	 * @param offset the first element to use.
+	 * @param length the number of elements to use.
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length, final float f ) {
+		this( length < 0 ? 0 : length, f );
+		ARRAYS.ensureOffsetLength( a, offset, length );
+		for( int i = 0; i < length; i++ ) add( a[ offset + i ] );
+	}
+#endif
+
+#ifdef Custom
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor and fills it with the elements of a given array.
+	 *
+	 * @param a an array whose elements will be used to fill the set.
+	 * @param offset the first element to use.
+	 * @param length the number of elements to use.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length, final Strategy<K> strategy ) {
+		this( a, offset, length, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor and fills it with the elements of a given array.
+	 *
+	 * @param a an array whose elements will be used to fill the set.
+	 * @param offset the first element to use.
+	 * @param length the number of elements to use.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length ) {
+		this( a, offset, length, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+#ifdef Custom
+	/** Creates a new hash set copying the elements of an array.
+	 *
+	 * @param a an array to be copied into the new hash set. 
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final KEY_GENERIC_TYPE[] a, final float f, final Strategy<K> strategy ) {
+		this( a, 0, a.length, f, strategy );
+	}
+#else
+	/** Creates a new hash set copying the elements of an array.
+	 *
+	 * @param a an array to be copied into the new hash set. 
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final KEY_GENERIC_TYPE[] a, final float f ) {
+		this( a, 0, a.length, f );
+	}
+#endif
+
+#ifdef Custom
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor 
+	 * copying the elements of an array.
+	 *
+	 * @param a an array to be copied into the new hash set. 
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final KEY_GENERIC_TYPE[] a, final Strategy<K> strategy ) {
+		this( a, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor 
+	 * copying the elements of an array.
+	 *
+	 * @param a an array to be copied into the new hash set. 
+	 */
+	 
+	public OPEN_DOUBLE_HASH_SET( final KEY_GENERIC_TYPE[] a ) {
+		this( a, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+
+#ifdef Custom
+	/** Returns the hashing strategy.
+	 *
+	 * @return the hashing strategy of this custom hash set.
+	 */
+
+	public Strategy<K> strategy() {
+		return strategy;
+	}
+#endif
+
+	/** Sets the growth factor. Subsequent enlargements will increase the table
+	 * size roughly by a multiplicative factor of 2<sup>p/16</sup>.
+	 * 
+	 * @param growthFactor the new growth factor; it must be positive.
+	 */
+
+	public void growthFactor( int growthFactor ) {
+		if ( growthFactor <= 0 ) throw new IllegalArgumentException( "Illegal growth factor " + growthFactor );
+		this.growthFactor = growthFactor;
+	}
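+
+	/* A quick arithmetic illustration of the rule above: under the 2^(p/16) policy a growth
+	 * factor of 16 doubles the table at each enlargement (the default behaviour described in
+	 * the class comment), a factor of 8 multiplies it by roughly 1.41, and a factor of 32
+	 * roughly quadruples it. A sketch of trading rehash frequency for a lower memory peak:
+	 *
+	 *   OPEN_DOUBLE_HASH_SET s = new OPEN_DOUBLE_HASH_SET();
+	 *   s.growthFactor( 8 ); // subsequent enlargements grow the table by about 41% each
+	 */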
+
+
+	/** Gets the growth factor.
+	 *
+	 * @return the growth factor of this set.
+	 * @see #growthFactor(int)
+	 */
+
+	public int growthFactor() {
+		return growthFactor;
+	}
+
+
+	/*
+	 * The following methods implement some basic building blocks used by
+	 * all accessors. They are (and should be maintained) identical to those used in HashMap.drv.
+	 */
+
+	/** Searches for a key, keeping track of a possible insertion point.
+	 *
+	 * @param k the key.
+	 * @return the index of the correct insertion point, if the key is not found; otherwise,
+	 * <var>-i</var>-1, where <var>i</var> is the index of the entry containing the key.
+	 */
+
+	protected final int findInsertionPoint( final KEY_GENERIC_TYPE k ) {
+		final KEY_GENERIC_TYPE key[] = this.key;
+		final byte state[] = this.state;
+		final int n = key.length;
+
+		// First of all, we make the key into a positive integer.
+#if #keyclass(Object)
+		final int h, k2i = ( h = KEY2INTHASH( k ) ) & 0x7FFFFFFF; 
+#else
+		final int k2i = KEY2INTHASH(k) & 0x7FFFFFFF; 
+#endif
+		// The primary hash, a.k.a. starting point.
+		int h1 = k2i % n;
+
+		if ( state[ h1 ] == OCCUPIED && ! KEY_EQUALS( key[ h1 ], k ) ) {
+			// The secondary hash.
+			final int h2 = ( k2i % ( n - 2 ) ) + 1;
+			do {
+				h1 += h2;
+				if ( h1 >= n || h1 < 0 ) h1 -= n;
+			} while( state[ h1 ] == OCCUPIED && ! KEY_EQUALS( key[ h1 ], k ) ); // There's always a FREE entry.
+		}
+
+		if (state[ h1 ] == FREE) return h1;
+		if (state[ h1 ] == OCCUPIED) return -h1-1; // Necessarily, KEY_EQUALS( key[ h1 ], k ).
+
+		/* Tables without deletions will never use code beyond this point. */
+
+		final int i = h1; // Remember first available bucket for later.
+		  
+		/** See the comments in the documentation of the interface Hash. */
+		if ( ASSERTS ) assert state[ h1 ] == REMOVED;
+		if ( ! KEY_EQUALS( key[ h1 ], k ) ) {
+			// The secondary hash.
+			final int h2 = ( k2i % ( n - 2 ) ) + 1;
+			do {
+				h1 += h2;
+				if ( h1 >= n || h1 < 0 ) h1 -= n;
+			}  while( state[ h1 ] != FREE && ! KEY_EQUALS( key[ h1 ], k ) );
+		}
+		  
+		return state[ h1 ] == OCCUPIED ? -h1-1 : i; // In the first case, necessarily, KEY_EQUALS( key[ h1 ], k ).
+	}
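+
+	/* A minimal sketch of how a caller decodes the convention documented above (it is
+	 * essentially what add() below does): a negative return value encodes the index of the
+	 * entry that already contains the key, a nonnegative one a slot where it can be stored.
+	 *
+	 *   final int i = findInsertionPoint( k );
+	 *   if ( i < 0 ) {
+	 *       // k is already present, at index -i - 1.
+	 *   }
+	 *   else {
+	 *       // k is absent; i is a FREE or REMOVED slot.
+	 *       key[ i ] = k;
+	 *       state[ i ] = OCCUPIED;
+	 *   }
+	 */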
+
+
+	/** Searches for a key.
+	 *
+	 * @param k the key.
+	 * @return the index of the entry containing the key, or -1 if the key wasn't found.
+	 */
+
+	protected final int findKey( final KEY_GENERIC_TYPE k ) {
+		final KEY_GENERIC_TYPE key[] = this.key;
+		final byte state[] = this.state;
+		final int n = key.length;
+
+		// First of all, we make the key into a positive integer.
+#if #keyclass(Object)
+		final int h, k2i = ( h = KEY2INTHASH( k ) ) & 0x7FFFFFFF; 
+#else
+		final int k2i = KEY2INTHASH(k) & 0x7FFFFFFF; 
+#endif
+		// The primary hash, a.k.a. starting point.
+		int h1 = k2i % n;
+		  
+		/** See the comments in the documentation of the interface Hash. */
+		if ( state[ h1 ] != FREE && ! KEY_EQUALS( key[ h1 ], k ) ) {
+			// The secondary hash.
+			final int h2 = ( k2i % ( n - 2 ) ) + 1;
+			do {
+				h1 += h2;
+				if ( h1 >= n || h1 < 0 ) h1 -= n;
+			} while( state[ h1 ] != FREE && ! KEY_EQUALS( key[ h1 ], k ) ); // There's always a FREE entry.
+		}
+
+		return state[ h1 ] == OCCUPIED ? h1 : -1;  // In the first case, necessarily, KEY_EQUALS( key[ h1 ], k ).
+	}
+
+	public boolean add( final KEY_GENERIC_TYPE k ) {
+		final int i = findInsertionPoint( k );
+		if ( i < 0 ) return false;
+
+		if ( state[ i ] == FREE ) free--;
+		state[ i ] = OCCUPIED;
+		key[ i ] = k;
+
+#ifdef Linked
+		if ( count == 0 ) {
+			first = last = i;
+			link[ i ] = 0;
+		}
+		else {
+			link[ last ] ^= i ^ -1;
+			link[ i ] = last ^ -1;
+			last = i;
+		}
+#endif
+
+		if ( ++count >= maxFill ) {
+			int newP = Math.min( p + growthFactor, PRIMES.length - 1 );
+			// Just to be sure that size changes when p is very small.
+			while( PRIMES[ newP ] == PRIMES[ p ] ) newP++;
+			rehash( newP ); // Table too filled, let's rehash
+		}
+		if ( free == 0 ) rehash( p );
+		if ( ASSERTS ) checkTable();
+		return true;
+	}
+
+	@SuppressWarnings("unchecked")
+	public boolean remove( final KEY_TYPE k ) {
+		final int i = findKey( KEY_GENERIC_CAST k );
+		if ( i < 0 ) return false;
+		state[ i ] = REMOVED;
+		count--;
+
+#if #keys(reference)
+		key[ i ] = KEY_GENERIC_CAST HashCommon.REMOVED;
+#endif
+
+#ifdef Linked
+		fixPointers( i );
+#endif
+
+		if ( ASSERTS ) checkTable();
+		return true;
+	}
+	 
+	@SuppressWarnings("unchecked")
+	public boolean contains( final KEY_TYPE k ) {
+		return findKey( KEY_GENERIC_CAST k ) >= 0;
+	}
+
+
+	/* Removes all elements from this set.
+	 *
+	 * <P>To increase object reuse, this method does not change the table size.
+	 * If you want to reduce the table size, you must use {@link #trim()}.
+	 *
+	 */
+
+	public void clear() {
+		if ( free == state.length ) return;
+
+		free = state.length;
+		count = 0;
+
+		ByteArrays.fill( state, FREE );
+
+#if #keys(reference)
+		ObjectArrays.fill( key, null );
+#endif
+
+#ifdef Linked
+		first = last = -1;
+#endif
+	}
+
+
+
+#ifdef Linked
+
+	/** Modifies the {@link #link} vector so that the given entry is removed.
+	 *
+	 * <P>If the given entry is the first or the last one, this method will complete
+	 * in constant time; otherwise, it will have to search for the given entry.
+	 *
+	 * @param i the index of an entry. 
+	 */
+	private void fixPointers( int i ) {
+		if ( count == 0 ) {
+			first = last = -1;
+			return;
+		}
+
+		if ( first == i ) {
+			first = link[ i ] ^ -1;
+			link[ first ] ^= i ^ -1;
+			return;
+		}
+
+		if ( last == i ) {
+			last = link[ i ] ^ -1;
+			link[ last ] ^= i ^ -1;
+			return;
+		}
+
+		int j = first, prev = -1, next;
+		while( ( next = link[ j ] ^ prev ) != i ) {
+			prev = j;
+			j = next;
+		}
+		link[ j ] ^= link[ i ] ^ i ^ j;
+		link[ link[ i ] ^ j ] ^= i ^ j;
+	}
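
(The link vector manipulated above stores, for each entry, the bitwise XOR of the indices of its
predecessor and successor in iteration order, with -1 standing for "no neighbour"; a traversal therefore
has to carry the previous index along in order to recover the next one. A minimal illustrative walk, not
taken from the source:)

	// Walks an XOR-linked chain of table indices, starting at the entry `first`.
	static void walk( final int[] link, final int first ) {
		int prev = -1, curr = first;
		while( curr != -1 ) {
			System.out.println( "visiting entry " + curr );
			final int next = link[ curr ] ^ prev; // recover the successor from the stored XOR
			prev = curr;
			curr = next;
		}
	}
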
+
+
+	/** Returns the first element of this set in iteration order.
+	 *
+	 * @return the first element in iteration order.
+	 */
+	public KEY_GENERIC_TYPE FIRST() {
+		if ( count == 0 ) throw new NoSuchElementException();
+		return key[ first ];
+	}
+
+
+	/** Returns the last element of this set in iteration order.
+	 *
+	 * @return the last element in iteration order.
+	 */
+	public KEY_GENERIC_TYPE LAST() {
+		if ( count == 0 ) throw new NoSuchElementException();
+		return key[ last ];
+	}
+
+
+	public SORTED_SET KEY_GENERIC tailSet( KEY_GENERIC_TYPE from ) { throw new UnsupportedOperationException(); }
+	public SORTED_SET KEY_GENERIC headSet( KEY_GENERIC_TYPE to ) { throw new UnsupportedOperationException(); }
+	public SORTED_SET KEY_GENERIC subSet( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) { throw new UnsupportedOperationException(); }
+	
+	public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return null; }
+
+
+
+	/** A list iterator over a linked set.
+	 *
+	 * <P>This class provides a list iterator over a linked hash set. The empty constructor runs in 
+	 * constant time. The one-argument constructor needs to search for the given element, but it is 
+	 * optimized for the case of {@link java.util.SortedSet#last()}, in which case it runs in constant time, too.
+	 */
+	private class SetIterator extends KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC {
+		/** The entry that will be returned by the next call to {@link java.util.ListIterator#previous()} (or -1 if no previous entry exists). */
+		int prev = -1;
+		/** The entry that will be returned by the next call to {@link java.util.ListIterator#next()} (or -1 if no next entry exists). */
+		int next = -1;
+		/** The last entry that was returned (or -1 if we did not iterate or used {@link #remove()}). */
+		int curr = -1;
+		/** The current index (in the sense of a {@link java.util.ListIterator}). Note that this value is not meaningful when this {@link SetIterator} has been created using the nonempty constructor.*/
+		int index = 0;
+
+		SetIterator() {
+			next = first;
+		}
+
+		SetIterator( KEY_GENERIC_TYPE from ) {
+			if ( KEY_EQUALS( key[ last ], from ) ) {
+				prev = last;
+				index = count;
+			}
+			else {
+				if ( ! contains( from ) ) throw new IllegalArgumentException( "The key " + from + " does not belong to this set." );
+				next = first;
+				KEY_GENERIC_TYPE k;
+				do k = NEXT_KEY(); while( ! KEY_EQUALS( k, from ) );
+				curr = -1;
+			}
+		}
+					 
+		public boolean hasNext() { return next != -1; }
+		public boolean hasPrevious() { return prev != -1; }
+					 
+		public KEY_GENERIC_TYPE NEXT_KEY() {
+			if ( ! hasNext() ) throw new NoSuchElementException();
+
+			curr = next;
+			next = link[ curr ] ^ prev;
+			prev = curr;
+
+			index++;
+
+			return key[ curr ];
+		}
+
+		public KEY_GENERIC_TYPE PREV_KEY() {
+			if ( ! hasPrevious() ) throw new NoSuchElementException();
+
+			curr = prev;
+			prev = link[ curr ] ^ next;
+			next = curr;
+
+			index--;
+
+			return key[ curr ];
+		}
+
+		public int nextIndex() {
+			return index;
+		}
+
+		public int previousIndex() {
+			return index - 1;
+		}
+
+		
+		@SuppressWarnings("unchecked")
+		public void remove() {
+			if ( curr == -1 ) throw new IllegalStateException();
+			state[ curr ] = REMOVED;
+#if #keys(reference)
+			key[ curr ] = KEY_GENERIC_CAST HashCommon.REMOVED;
+#endif
+			if ( curr == prev ) {
+				/* If the last operation was a next(), we are removing an entry that precedes
+				   the current index, and thus we must decrement it. */
+				index--;
+				prev = link[ curr ] ^ next;
+			}
+			else next = link[ curr ] ^ prev; // curr == next
+
+			count--;
+			/* Now we manually fix the pointers. Because of our knowledge of next
+			   and prev, this is going to be faster than calling fixPointers(). */
+			if ( prev == -1 ) first = next;
+			else link[ prev ] ^= curr ^ next;
+			if ( next == -1 ) last = prev;
+			else link[ next ] ^= curr ^ prev;
+			curr = -1;
+		}
+	}
+
+
+	/** Returns a type-specific list iterator on the elements in this set, starting from a given element of the set.
+	 *
+	 * <P>This method provides an iterator positioned immediately after the
+	 * given element. That is, the next call to <code>previous()</code> will
+	 * return <code>from</code>, whereas the next call to <code>next()</code>
+	 * will return the element immediately after <code>from</code>. This
+	 * makes it possible to call <code>iterator(last())</code> and obtain an iterator starting
+	 * from the end of the iteration order.
+	 *
+	 * <P>Because of the way linking is implemented, generating an iterator using this method
+	 * requires constant time only if the argument is <code>last()</code>. In all other cases,
+	 * a linear search for the given element will be necessary.
+	 *
+	 * <P>Note that this method returns a bidirectional iterator, which, however, can be safely cast to 
+	 * a type-specific list iterator.
+	 *
+	 * @param from an element to start from.
+	 * @return a type-specific list iterator starting at the given element.
+	 * @throws IllegalArgumentException if <code>from</code> does not belong to the set.
+	 */
+	public KEY_BIDI_ITERATOR KEY_GENERIC iterator( KEY_GENERIC_TYPE from ) {
+		return new SetIterator( from );
+	}
+
+	public KEY_BIDI_ITERATOR KEY_GENERIC iterator() {
+		return new SetIterator();
+	}
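
(As a usage sketch of the iterator-from-element contract documented above: driving the analogous linked
set generated for int keys could look as follows. The concrete names used here — IntLinkedOpenHashSet,
lastInt(), previousInt() — are assumptions made only to have a compilable example and are not defined in
this file.)

	import it.unimi.dsi.fastutil.ints.IntBidirectionalIterator;
	import it.unimi.dsi.fastutil.ints.IntLinkedOpenHashSet;

	public class IterateBackwards {
		public static void main( final String[] args ) {
			final IntLinkedOpenHashSet s = new IntLinkedOpenHashSet();
			for( int i = 0; i < 5; i++ ) s.add( i ); // iteration order: 0 1 2 3 4
			// Starting from last() is the constant-time case; previous() then yields 4, 3, 2, 1, 0.
			final IntBidirectionalIterator it = s.iterator( s.lastInt() );
			while( it.hasPrevious() ) System.out.println( it.previousInt() );
		}
	}
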
+
+#else	 
+
+	/** An iterator over a hash set. */
+
+	private class SetIterator extends KEY_ABSTRACT_ITERATOR KEY_GENERIC {
+		/** The index of the next entry to be returned. */
+		int pos = 0;
+		/** The index of the last entry that has been returned. */
+		int last = -1;
+		/** A downward counter measuring how many entries have been returned. */
+		int c = count;
+		
+		{ 
+			final byte state[] = OPEN_DOUBLE_HASH_SET.this.state;
+			final int n = state.length;
+			
+			if ( c != 0 ) while( pos < n && state[pos] != OCCUPIED ) pos++;
+		}
+		
+		public boolean hasNext() {
+			return c != 0 && pos < OPEN_DOUBLE_HASH_SET.this.state.length;
+		}
+		
+		public KEY_GENERIC_TYPE NEXT_KEY() {
+			KEY_GENERIC_TYPE retVal;
+			final byte state[] = OPEN_DOUBLE_HASH_SET.this.state;
+			final int n = state.length;
+			
+			if ( ! hasNext() ) throw new NoSuchElementException();
+			retVal = key[ last = pos ];
+			if ( --c != 0 ) do pos++; while( pos < n && state[ pos ] != OCCUPIED );
+			
+			return retVal;
+		}
+		
+		@SuppressWarnings("unchecked")
+		public void remove() {
+			if ( last == -1 || state[ last ] != OCCUPIED ) throw new IllegalStateException();
+			state[last] = REMOVED;
+#if #keys(reference)
+			key[ last ] = KEY_GENERIC_CAST HashCommon.REMOVED;
+#endif
+			count--;
+		}
+	}
+
+	public KEY_ITERATOR KEY_GENERIC iterator() {
+		return new SetIterator();
+	}
+
+#endif
+
+
+
+	/** Rehashes this set without changing the table size.
+	 *
+	 * <P>This method should be called when the set underwent numerous
+	 * deletions and insertions.  In this case, free entries become rare, and
+	 * unsuccessful searches require probing <em>all</em> entries.  For
+	 * reasonable load factors this method is linear in the number of entries.
+	 * You will need as much additional free memory as that occupied by the
+	 * table.
+	 *
+	 * <P>If you need to reduce the table size to fit exactly
+	 * this set, you must use {@link #trim()}.
+	 *
+	 * @return true if there was enough memory to rehash the set, false otherwise.
+	 * @see #trim()
+	 */
+
+	public boolean rehash() {
+		try {
+			rehash( p );
+		}
+		catch( OutOfMemoryError cantDoIt ) { return false; }
+		return true;
+	}
+
+
+	/** Rehashes this set, making the table as small as possible.
+	 * 
+	 * <P>This method rehashes the table to the smallest size satisfying the
+	 * load factor. It can be used when the set will not be changed anymore, so
+	 * as to optimize access speed (by collecting deleted entries) and size.
+	 *
+	 * <P>If the table size is already the minimum possible, this method
+	 * does nothing. If you want to guarantee rehashing, use {@link #rehash()}.
+	 *
+	 * @return true if there was enough memory to trim the set.
+	 * @see #trim(int)
+	 * @see #rehash()
+	 */
+
+	public boolean trim() {
+		int l = Arrays.binarySearch( PRIMES, (int)( count / f ) + 1 );
+		if ( l < 0 ) l = -l - 1;
+		if ( l >= p ) return true;
+		try {
+			rehash( l );
+		}
+		catch(OutOfMemoryError cantDoIt) { return false; }
+		return true;
+	}
+
+	/** Rehashes this set if the table is too large.
+	 * 
+	 * <P>Let <var>N</var> be the smallest table size that can hold
+	 * <code>max(n,{@link #size()})</code> entries, still satisfying the load factor. If the current
+	 * table size is smaller than or equal to <var>N</var>, this method does
+	 * nothing. Otherwise, it rehashes this set in a table of size
+	 * <var>N</var>.
+	 *
+	 * <P>This method is useful when reusing sets.  {@linkplain #clear() Clearing a
+	 * set} leaves the table size untouched. If you are reusing a set
+	 * many times, you can call this method with a typical
+	 * size to avoid keeping around a very large table just
+	 * because of a few large transient sets.
+	 *
+	 * @param n the threshold for the trimming.
+	 * @return true if there was enough memory to trim the set.
+	 * @see #trim()
+	 * @see #rehash()
+	 */
+
+	public boolean trim( final int n ) {
+		int l = Arrays.binarySearch( PRIMES, (int)( Math.min( Integer.MAX_VALUE - 1, Math.max( n, count ) / f ) ) + 1 );
+		if ( l < 0 ) l = -l - 1;
+		if ( p <= l ) return true;
+		try {
+			rehash( l );
+		}
+		catch( OutOfMemoryError cantDoIt ) { return false; }
+		return true;
+	}
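
(The reuse pattern that trim(int) is meant for can be sketched as follows, shown on the int-keyed set
for concreteness; the class name IntOpenHashSet and its clear()/trim(int) signatures are assumptions of
this sketch.)

	// Reuses one scratch set across many rounds without keeping a huge table alive.
	static void process( final it.unimi.dsi.fastutil.ints.IntOpenHashSet scratch ) {
		for( int round = 0; round < 1000; round++ ) {
			// ... fill scratch with a possibly large, transient batch of keys ...
			scratch.clear();      // clear() keeps the (possibly very large) table allocated
			scratch.trim( 1024 ); // shrink the table back towards a typical working size
		}
	}
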
+
+	/** Resizes the set.
+	 *
+	 * <P>This method implements the basic rehashing strategy, and may be
+	 * overridden by subclasses implementing different rehashing strategies (e.g.,
+	 * disk-based rehashing). However, you should not override this method
+	 * unless you understand the internal workings of this class.
+	 *
+	 * @param newP the new size as an index in {@link Hash#PRIMES}.
+	 */
+
+	@SuppressWarnings("unchecked")
+	protected void rehash( final int newP ) {
+#ifdef Linked
+		int i = first, j = count, prev = -1, newPrev = -1, t, k2i, h1, h2;
+#else
+		int i = 0, j = count, k2i, h1, h2;
+		final byte state[] = this.state;
+#endif
+
+		//System.err.println("Rehashing to size " +  PRIMES[newP] + "...");
+
+		KEY_GENERIC_TYPE k;
+
+		final int newN = PRIMES[ newP ];
+		final KEY_GENERIC_TYPE key[] = this.key, newKey[] = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ newN ];
+		final byte newState[] = new byte[ newN ];
+#ifdef Linked
+		final int link[] = this.link, newLink[] = new int[ newN ];
+		first = -1;
+#endif
+
+		while( j-- != 0 ) {
+
+#ifndef Linked
+			while( state[ i ] != OCCUPIED ) i++;
+#endif
+
+			k = key[ i ];
+			k2i = KEY2INTHASH( k ) & 0x7FFFFFFF;
+
+			h1 = k2i % newN;
+
+			if ( newState[ h1 ] != FREE ) {
+				h2 = ( k2i % ( newN - 2 ) ) + 1;
+				do {
+					h1 += h2;
+					if ( h1 >= newN || h1 < 0 ) h1 -= newN;
+				} while( newState[ h1 ] != FREE );
+			}
+				
+			newState[ h1 ] = OCCUPIED;
+			newKey[ h1 ] = k;
+
+#ifdef Linked
+			t = i;
+			i = link[ i ] ^ prev;
+			prev = t;
+
+			if ( first != -1 ) {
+				newLink[ newPrev ] ^= h1;
+				newLink[ h1 ] = newPrev;
+				newPrev = h1;
+			}
+			else {
+				newPrev = first = h1;
+				newLink[ h1 ] = -1;
+			}
+#else
+			i++;
+#endif
+		}
+
+		p = newP;
+		free = newN - count;
+		maxFill = (int)( newN * f );
+		this.key = newKey;
+		this.state = newState;
+#ifdef Linked
+		this.link = newLink;
+		this.last = newPrev;
+		if ( newPrev != -1 ) newLink[ newPrev ] ^= -1; 
+#endif
+	}
+
+	public int size() {
+		return count;
+	}
+
+	public boolean isEmpty() {
+		return count == 0;
+	}
+
+
+
+	/** Returns a deep copy of this set. 
+	 *
+	 * <P>This method performs a deep copy of this hash set; the data stored in the
+	 * set, however, is not cloned. Note that this makes a difference only for object keys.
+	 *
+	 *  @return a deep copy of this set.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public OPEN_DOUBLE_HASH_SET KEY_GENERIC clone() {
+		OPEN_DOUBLE_HASH_SET KEY_GENERIC c;
+		try {
+			c = (OPEN_DOUBLE_HASH_SET KEY_GENERIC)super.clone();
+		}
+		catch(CloneNotSupportedException cantHappen) {
+			throw new InternalError();
+		}
+		c.key = key.clone();
+		c.state = state.clone();
+#ifdef Linked
+		c.link = link.clone();
+#endif
+#ifdef Custom
+		c.strategy = strategy;
+#endif
+		return c;
+	}
+
+	/** Returns a hash code for this set.
+	 *
+	 * This method overrides the generic method provided by the superclass. 
+	 * Since <code>equals()</code> is not overridden, it is important
+	 * that the value returned by this method is the same value as
+	 * the one returned by the overridden method.
+	 *
+	 * @return a hash code for this set.
+	 */
+
+
+	public int hashCode() {
+		int h = 0, i = 0, j = count;
+		while( j-- != 0 ) {
+			while( state[ i ] != OCCUPIED ) i++;
+#if #keys(reference)
+			if ( this != key[ i ] )
+#endif
+				h += KEY2JAVAHASH( key[ i ] );
+			i++;
+		}
+		return h;
+	}
+
+
+	private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
+		final KEY_ITERATOR KEY_GENERIC i = iterator();
+		int j = count;
+		s.defaultWriteObject();
+		while( j-- != 0 ) s.WRITE_KEY( i.NEXT_KEY() );
+	}
+
+
+	@SuppressWarnings("unchecked")
+	private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
+		s.defaultReadObject();
+		// We restore the default growth factor.
+		growthFactor = Hash.DEFAULT_GROWTH_FACTOR;
+		// Note that we DO NOT USE the stored p. See CHANGES.
+		p = Arrays.binarySearch( PRIMES, (int)( count / f ) + 1 );
+		if ( p < 0 ) p = -p - 1;
+
+		final int n = PRIMES[ p ];
+		maxFill = (int)( n * f );
+		free = n - count;
+		
+		final KEY_GENERIC_TYPE key[] = this.key = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ n ];
+		final byte state[] = this.state = new byte[ n ];
+#ifdef Linked
+		final int link[] = this.link = new int[ n ];
+		int prev = -1;
+		first = last = -1;
+#endif
+
+		int i, k2i, h1, h2;
+		KEY_GENERIC_TYPE k;
+
+		i = count;
+		while( i-- != 0 ) {
+
+			k = KEY_GENERIC_CAST s.READ_KEY();
+			k2i = KEY2INTHASH( k ) & 0x7FFFFFFF;
+
+			h1 = k2i % n;
+
+			if ( state[ h1 ] != FREE ) {
+				h2 = ( k2i % ( n - 2 ) ) + 1;
+				do {
+					h1 += h2;
+					if ( h1 >= n || h1 < 0 ) h1 -= n;
+				} while( state[ h1 ] != FREE );
+			}
+
+			state[ h1 ] = OCCUPIED;
+			key[ h1 ] = k;
+
+#ifdef Linked
+			if ( first != -1 ) {
+				link[ prev ] ^= h1;
+				link[ h1 ] = prev;
+				prev = h1;
+			}
+			else {
+				prev = first = h1;
+				link[ h1 ] = -1;
+			}
+#endif
+		}
+
+#ifdef Linked
+		last = prev;
+		if ( prev != -1 ) link[ prev ] ^= -1; 
+#endif
+
+		if ( ASSERTS ) checkTable();
+	}
+
+
+#ifdef ASSERTS_CODE
+	private void checkTable() {
+		int n = state.length;
+		while( n-- != 0 ) 
+			if ( state[ n ] == OCCUPIED && ! contains( key[ n ] ) ) 
+				throw new AssertionError( "Hash table has key " + key[ n ] + " marked as occupied, but the key does not belong to the table" );
+
+#ifdef Linked
+		KEY_BIDI_ITERATOR KEY_GENERIC i = iterator();
+		KEY_GENERIC_TYPE k;
+		n = size();
+		while( n-- != 0 ) 
+			if ( ! contains( k = i.NEXT_KEY() ) ) 
+				throw new AssertionError( "Linked hash table forward enumerates key " + k + ", but the key does not belong to the table" );
+
+		if ( i.hasNext() ) throw new AssertionError( "Forward iterator not exhausted" );
+
+		n = size();
+		if ( n > 0 ) {
+			i = iterator( LAST() );
+			while( n-- != 0 ) 
+				if ( ! contains( k = i.PREV_KEY() ) ) 
+					throw new AssertionError( "Linked hash table backward enumerates key " + k + ", but the key does not belong to the table" );
+			
+			if ( i.hasPrevious() ) throw new AssertionError( "Previous iterator not exhausted" );
+		}
+#endif
+	}
+#else
+	private void checkTable() {}
+#endif
+
+#ifdef TEST
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif #keyclass(Object)
+#ifdef Custom
+		int i = r.nextInt( 3 );
+		byte a[] = new byte[ i ];
+		while( i-- != 0 ) a[ i ] = (byte)r.nextInt();
+		return a;
+#else
+		return Integer.toBinaryString( r.nextInt() );
+#endif
+#else
+		return new java.io.Serializable() {};
+#endif
+	}
+
+
+	private static final class ArrayComparator implements java.util.Comparator {
+		public int compare( Object a, Object b ) {
+			byte[] aa = (byte[])a;
+			byte[] bb = (byte[])b;
+			int length = Math.min( aa.length, bb.length );
+			for( int i = 0; i < length; i++ ) {
+				if ( aa[ i ] < bb[ i ] ) return -1;
+				if ( aa[ i ] > bb[ i ] ) return 1;
+			}
+			return aa.length == bb.length ? 0 : ( aa.length < bb.length ? -1 : 1 );
+		}
+	}
+
+	private static final class MockSet extends java.util.TreeSet {
+		private java.util.List list = new java.util.ArrayList();
+
+		public MockSet( java.util.Comparator c ) { super( c ); }
+
+		public boolean add( Object k ) {
+			if ( ! contains( k ) ) list.add( k );
+			return super.add( k );
+		}
+
+		public boolean addAll( Collection c ) {
+			java.util.Iterator i = c.iterator();
+			boolean result = false;
+			while( i.hasNext() ) result |= add( i.next() );
+			return result;
+		}
+
+		public boolean removeAll( Collection c ) {
+			java.util.Iterator i = c.iterator();
+			boolean result = false;
+			while( i.hasNext() ) result |= remove( i.next() );
+			return result;
+		}
+
+		public boolean remove( Object k ) {
+			if ( contains( k ) ) {
+				int i = list.size();
+				while( i-- != 0 ) if ( comparator().compare( list.get( i ), k ) == 0 ) {
+					list.remove( i );
+					break;
+				}
+			}
+			return super.remove( k );
+		}
+
+		private void justRemove( Object k ) { super.remove( k ); }
+
+		public java.util.Iterator iterator() {
+			return new java.util.Iterator() {
+					final java.util.Iterator iterator = list.iterator();
+					Object curr;
+					public Object next() { return curr = iterator.next(); }
+					public boolean hasNext() { return iterator.hasNext(); }
+					public void remove() { 
+						justRemove( curr );
+						iterator.remove(); 
+					}
+				};
+		}
+	}
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition fp = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, fp ).toString();
+	}
+
+	private static void speedTest( int n, float f, boolean comp ) {
+#ifndef Custom
+		int i, j;
+		OPEN_DOUBLE_HASH_SET m;
+#ifdef Linked
+		java.util.LinkedHashSet t;
+#else
+		java.util.HashSet t;
+#endif
+
+		KEY_TYPE k[] = new KEY_TYPE[n];
+		KEY_TYPE nk[] = new KEY_TYPE[n];
+		long ms;
+
+		for( i = 0; i < n; i++ ) {
+			k[i] = genKey();
+			nk[i] = genKey();
+		}
+		  
+		double totAdd = 0, totYes = 0, totNo = 0, totIter = 0, totRemYes = 0, totRemNo = 0, d;
+
+		if ( comp ) { for( j = 0; j < 20; j++ ) {
+
+#ifdef Linked
+			t = new java.util.LinkedHashSet( 16 );
+#else
+			t = new java.util.HashSet( 16 );
+#endif
+
+			/* We add pairs to t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.add( KEY2OBJ( k[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totAdd += d; 				
+			System.out.print("Add: " + format( d ) +" K/s " );
+
+			/* We check for pairs in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.contains( KEY2OBJ( k[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totYes += d; 				
+			System.out.print("Yes: " + format( d ) +" K/s " );
+
+			/* We check for pairs not in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.contains( KEY2OBJ( nk[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totNo += d; 				
+			System.out.print("No: " + format( d ) +" K/s " );
+
+			/* We iterate on t. */
+			ms = System.currentTimeMillis();
+			for( java.util.Iterator it = t.iterator(); it.hasNext(); it.next() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIter += d; 				
+			System.out.print("Iter: " + format( d ) +" K/s " );
+				
+			/* We delete pairs not in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.remove( KEY2OBJ( nk[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemNo += d; 				
+			System.out.print("RemNo: " + format( d ) +" K/s " );
+				
+			/* We delete pairs in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.remove( KEY2OBJ( k[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemYes += d; 				
+			System.out.print("RemYes: " + format( d ) +" K/s " );
+				
+			System.out.println();
+		}
+
+		System.out.println();
+		System.out.println( "java.util Add: " + format( totAdd/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s Iter: " + format( totIter/(j-3) ) + " K/s RemNo: " + format( totRemNo/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + "K/s" );
+
+		System.out.println();
+
+		totAdd = totYes = totNo = totIter = totRemYes = totRemNo = 0;
+		}
+
+		for( j = 0; j < 20; j++ ) {
+
+			m = new OPEN_DOUBLE_HASH_SET( 16, f );
+
+			/* We add pairs to m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.add( k[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totAdd += d; 				
+			System.out.print("Add: " + format( d ) +" K/s " );
+
+			/* We check for pairs in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.contains( k[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totYes += d; 				
+			System.out.print("Yes: " + format( d ) +" K/s " );
+
+			/* We check for pairs not in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.contains( nk[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totNo += d; 				
+			System.out.print("No: " + format( d ) +" K/s " );
+
+			/* We iterate on m. */
+			ms = System.currentTimeMillis();
+			for( KEY_ITERATOR it = (KEY_ITERATOR)m.iterator(); it.hasNext(); it.NEXT_KEY() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIter += d; 	 
+			System.out.print("Iter: " + format( d ) +" K/s " );
+
+			/* We delete pairs not in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.remove( nk[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemNo += d; 	
+			System.out.print("RemNo: " + format( d ) +" K/s " );
+
+			/* We delete pairs in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.remove( k[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemYes += d; 				
+			System.out.print("RemYes: " + format( d ) +" K/s " );	 
+
+			System.out.println();
+		}
+
+
+		System.out.println();
+		System.out.println( "fastutil  Add: " + format( totAdd/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s Iter: " + format( totIter/(j-3) ) + " K/s RemNo: " + format( totRemNo/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s" );
+
+		System.out.println();
+#endif
+	}
+
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+
+	private static void test( int n, float f ) {
+		int c;
+#ifdef Custom
+		OPEN_DOUBLE_HASH_SET m = new OPEN_DOUBLE_HASH_SET(Hash.DEFAULT_INITIAL_SIZE, f, it.unimi.dsi.fastutil.bytes.ByteArrays.HASH_STRATEGY);
+#else
+		OPEN_DOUBLE_HASH_SET m = new OPEN_DOUBLE_HASH_SET(Hash.DEFAULT_INITIAL_SIZE, f);
+#endif
+#ifdef Linked
+#ifdef Custom
+		java.util.Set t = new MockSet(new ArrayComparator());
+#else
+		java.util.Set t = new java.util.LinkedHashSet();
+#endif
+#else
+#ifdef Custom
+		java.util.Set t = new java.util.TreeSet(new ArrayComparator());
+#else 
+		java.util.Set t = new java.util.HashSet();
+#endif
+#endif
+
+		/* First of all, we fill t with random data. */
+
+		for(int i=0; i<n;  i++ ) t.add(KEY2OBJ(genKey()));
+		  
+		/* Now we add to m the same data */
+		  
+		m.addAll(t); 
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after insertion");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after insertion");
+
+		/* Now we check that m actually holds that data. */
+		  
+		for(java.util.Iterator i=t.iterator(); i.hasNext();  ) {
+			Object e = i.next();
+			if (!m.contains(e)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+e+") after insertion (iterating on t)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.iterator(); i.hasNext();  ) {
+			Object e = i.next();
+			if (!t.contains(e)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+e+") after insertion (iterating on m)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that inquiries about random data give the same answer in m and t. For
+		   m we use the polymorphic method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+			if (m.contains(T) != t.contains(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence in keys between t and m (polymorphic method)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Again, we check that inquiries about random data give the same answer in m and t, but
+		   for m we use the standard method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+			if (m.contains(KEY2OBJ(T)) != t.contains(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence between t and m (standard method)");
+				System.exit( 1 );
+			}
+		}
+
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+			if (m.add(KEY2OBJ(T)) != t.add(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence in add() between t and m");
+				System.exit( 1 );
+			}
+			T = genKey();
+			if (m.remove(KEY2OBJ(T)) != t.remove(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence in remove() between t and m");
+				System.exit( 1 );
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after removal");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after removal");
+
+		/* Now we check that m actually holds that data. */
+		  
+		for(java.util.Iterator i=t.iterator(); i.hasNext();  ) {
+			Object e = i.next();
+			if (!m.contains(e)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+e+") after removal (iterating on t)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.iterator(); i.hasNext();  ) {
+			Object e = i.next();
+			if (!t.contains(e)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+e+") after removal (iterating on m)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we make m into an array, make it again a set and check it is OK. */
+		KEY_TYPE a[] = m.TO_KEY_ARRAY();
+		  
+#ifdef Custom
+		if (!new OPEN_DOUBLE_HASH_SET(a, m.strategy()).equals(m))
+			System.out.println("Error (" + seed + "): toArray() output (or array-based constructor) is not OK");
+#else
+		if (!new OPEN_DOUBLE_HASH_SET(a).equals(m))
+			System.out.println("Error (" + seed + "): toArray() output (or array-based constructor) is not OK");
+#endif
+
+		/* Now we check cloning. */
+
+		ensure( m.equals( ((OPEN_DOUBLE_HASH_SET)m).clone() ), "Error (" + seed + "): m does not equal m.clone()" );
+		ensure( ((OPEN_DOUBLE_HASH_SET)m).clone().equals( m ), "Error (" + seed + "): m.clone() does not equal m" );
+
+		int h = m.hashCode();
+
+		/* Now we save and read m. */
+
+		try {
+			java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test");
+			java.io.OutputStream os = new java.io.FileOutputStream(ff);
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os);
+				
+			oos.writeObject(m);
+			oos.close();
+				
+			java.io.InputStream is = new java.io.FileInputStream(ff);
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is);
+				
+			m = (OPEN_DOUBLE_HASH_SET)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch(Exception e) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+
+#if !#keyclass(Reference)
+		if (m.hashCode() != h) System.out.println("Error (" + seed + "): hashCode() changed after save/read");
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.iterator(); i.hasNext();  ) {
+			Object e = i.next();
+			if (!t.contains(e)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+e+") after save/read");
+				System.exit( 1 );
+			}
+		}
+#else
+		m.clear();
+		m.addAll( t );
+#endif
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+			if (m.add(KEY2OBJ(T)) != t.add(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence in add() between t and m after save/read");
+				System.exit( 1 );
+			}
+			T = genKey();
+			if (m.remove(KEY2OBJ(T)) != t.remove(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence in remove() between t and m after save/read");
+				System.exit( 1 );
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after post-save/read removal");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after post-save/read removal");
+
+
+#ifdef Linked
+
+				 
+		/* Now we play with iterators, but only in the linked case. */
+
+		{
+			java.util.ListIterator i, j;
+			Object I, J;
+			i = (java.util.ListIterator)m.iterator(); 
+			j = new java.util.LinkedList( t ).listIterator(); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + seed + "): divergence in hasNext()" );
+				ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + seed + "): divergence in hasPrevious()" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+#ifdef Custom
+					ensure( m.strategy().equals( i.next(), J = j.next() ), "Error (" + seed + "): divergence in next()" );
+#else
+					ensure( i.next().equals( J = j.next() ), "Error (" + seed + "): divergence in next()" );
+#endif
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+#ifdef Custom
+					ensure( m.strategy().equals( i.previous(), J = j.previous() ), "Error (" + seed + "): divergence in previous()" );
+#else
+					ensure( i.previous().equals( J = j.previous() ), "Error (" + seed + "): divergence in previous()" );
+#endif
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+
+				ensure( i.nextIndex() == j.nextIndex(), "Error (" + seed + "): divergence in nextIndex()" );
+				ensure( i.previousIndex() == j.previousIndex(), "Error (" + seed + "): divergence in previousIndex()" );
+
+			}
+
+		}
+
+		if ( t.size() > 0 ) {
+			java.util.ListIterator i, j;
+			Object J;
+			j = new java.util.LinkedList( t ).listIterator(); 
+			int e = r.nextInt( t.size() );
+			Object from;
+			do from = j.next(); while( e-- != 0 );
+
+			i = (java.util.ListIterator)m.iterator( KEY_OBJ2TYPE( from ) ); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" );
+				ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+#ifdef Custom
+					ensure( m.strategy().equals( i.next(), J = j.next() ), "Error (" + seed + "): divergence in next() (iterator with starting point " + from + ")" );
+#else
+					ensure( i.next().equals( J = j.next() ), "Error (" + seed + "): divergence in next() (iterator with starting point " + from + ")" );
+#endif
+
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+#ifdef Custom
+					ensure( m.strategy().equals( i.previous(), J = j.previous() ), "Error (" + seed + "): divergence in previous() (iterator with starting point " + from + ")" );
+#else
+					ensure( i.previous().equals( J = j.previous() ), "Error (" + seed + "): divergence in previous() (iterator with starting point " + from + ")" );
+#endif
+
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+
+				ensure( i.nextIndex() == j.nextIndex(), "Error (" + seed + "): divergence in nextIndex() (iterator with starting point " + from + ")" );
+				ensure( i.previousIndex() == j.previousIndex(), "Error (" + seed + "): divergence in previousIndex() (iterator with starting point " + from + ")" );
+
+			}
+
+		}
+
+		/* Now we check that m actually holds that data. */
+		  
+		ensure( m.equals(t), "Error (" + seed + "): ! m.equals( t ) after iteration" );
+		ensure( t.equals(m), "Error (" + seed + "): ! t.equals( m ) after iteration" );
+
+
+
+#endif
+
+		/* Now we take out of m everything, and check that it is empty. */
+
+		for(java.util.Iterator i=m.iterator(); i.hasNext(); ) { i.next(); i.remove();} 
+
+		if (!m.isEmpty())  {
+			System.out.println("Error (" + seed + "): m is not empty (as it should be)");
+			System.exit( 1 );
+		}
+
+#if #keyclass(Integer) || #keyclass(Long)
+		m = new OPEN_DOUBLE_HASH_SET(n, f);
+		t.clear();
+		int x;
+
+		/* Now we torture-test the hash table. This part is implemented only for integers and longs. */
+
+		int p = m.state.length;
+
+		for(int i=0; i<p; i++) {
+			for (int j=0; j<20; j++) {
+				m.add(i+(r.nextInt() % 10)*p);
+				m.remove(i+(r.nextInt() % 10)*p);
+			}
+
+			for (int j=-10; j<10; j++) m.remove(i+j*p);
+		}
+		  
+		t.addAll(m);
+
+		/* Now all table entries are REMOVED. */
+ 
+		int k = 0;
+		for(int i=0; i<(p*f)/10; i++) {
+			for (int j=0; j<10; j++) {
+				k++;
+				x = i+(r.nextInt() % 10)*p;
+				if (m.add(x) != t.add(KEY2OBJ(x)))
+					System.out.println("Error (" + seed + "): m and t differ on a key during torture-test insertion.");
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after torture-test insertion");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after torture-test insertion");
+
+		for(int i=0; i<(p*f)/10; i++) {
+			for (int j=0; j<10; j++) {
+				x = i+(r.nextInt() % 10)*p;
+				if (m.remove(x) != t.remove(KEY2OBJ(x)))
+					System.out.println("Error (" + seed + "): m and t differ on a key during torture-test removal.");
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after torture-test removal");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after torture-test removal");
+
+		if (!m.equals(m.clone())) System.out.println("Error (" + seed + "): !m.equals(m.clone()) after torture-test removal");
+		if (!((OPEN_DOUBLE_HASH_SET)m.clone()).equals(m)) System.out.println("Error (" + seed + "): !m.clone().equals(m) after torture-test removal");
+
+		m.rehash();
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after rehash()");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after rehash()");
+
+		m.trim();
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after trim()");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after trim()");
+#endif
+
+		System.out.println("Test OK");
+		return;
+	}
+
+
+	public static void main( String args[] ) {
+		float f = Hash.DEFAULT_LOAD_FACTOR;
+		int n  = Integer.parseInt(args[1]);
+		if (args.length>2) f = Float.parseFloat(args[2]);
+		if ( args.length > 3 ) r = new java.util.Random( seed = Long.parseLong( args[ 3 ] ) );
+		  
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, f, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n, f);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+
+#endif
+
+}
diff --git a/drv/OpenHashBigSet.drv b/drv/OpenHashBigSet.drv
new file mode 100644
index 0000000..85c6121
--- /dev/null
+++ b/drv/OpenHashBigSet.drv
@@ -0,0 +1,1298 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.BigArrays;
+import it.unimi.dsi.fastutil.Hash;
+import it.unimi.dsi.fastutil.Size64;
+import it.unimi.dsi.fastutil.HashCommon;
+import it.unimi.dsi.fastutil.booleans.BooleanBigArrays;
+import static it.unimi.dsi.fastutil.HashCommon.bigArraySize;
+import static it.unimi.dsi.fastutil.HashCommon.maxFill;
+
+import java.util.Collection;
+#if #keys(primitive)
+import java.util.Iterator;
+#endif
+import java.util.NoSuchElementException;
+
+
+/** A type-specific hash big set with a fast, small-footprint implementation.
+ *
+ * <P>Instances of this class use a hash table to represent a big set: the number
+ * of elements in the set is limited only by the amount of core memory. The table is
+ * backed by a {@linkplain it.unimi.dsi.fastutil.BigArrays big array} and is
+ * enlarged as needed by doubling its size when new entries are created, but it is <em>never</em> made
+ * smaller (even on a {@link #clear()}). A family of {@linkplain #trim(long) trimming
+ * methods} lets you control the size of the table; this is particularly useful
+ * if you reuse instances of this class.
+ *
+ * <p>The methods of this class are about 30% slower than those of the corresponding non-big set.
+ *
+ * @see Hash
+ * @see HashCommon
+ */
+
+public class OPEN_HASH_BIG_SET KEY_GENERIC extends ABSTRACT_SET KEY_GENERIC implements java.io.Serializable, Cloneable, Hash, Size64 {
+
+	private static final long serialVersionUID = 0L;
+	private static final boolean ASSERTS = ASSERTS_VALUE;
+
+	/** The big array of keys. */
+	protected transient KEY_GENERIC_TYPE[][] key;
+	 
+	/** The big array telling whether a position is used. */
+	protected transient boolean[][] used;
+
+	/** The acceptable load factor. */
+	protected final float f;
+	 
+	/** The current table size (always a power of 2). */
+	protected transient long n;
+
+	/** Threshold after which we rehash. It must be the table size times {@link #f}. */
+	protected transient long maxFill;
+
+	/** The mask for wrapping a position counter. */
+	protected transient long mask;
+
+	/** The mask for wrapping a segment counter. */
+	protected transient int segmentMask;
+
+	/** The mask for wrapping a base counter. */
+	protected transient int baseMask;
+
+	/** Number of entries in the set. */
+	protected long size;
+
+
+	/** Initialises the mask values. */
+	private void initMasks() {
+		mask = n - 1;
+		/* Note that either we have more than one segment, and in this case all segments
+		 * are BigArrays.SEGMENT_SIZE long, or we have exactly one segment whose length
+		 * is a power of two. */
+		segmentMask = key[ 0 ].length - 1;
+		baseMask = key.length - 1;		
+	}
+
+	/** Creates a new hash big set.
+	 *
+	 * <p>The actual table size will be the least power of two greater than <code>expected</code>/<code>f</code>.
+	 *
+	 * @param expected the expected number of elements in the set. 
+	 * @param f the load factor.
+	 */
+	@SuppressWarnings("unchecked")
+	public OPEN_HASH_BIG_SET( final long expected, final float f ) {
+		if ( f <= 0 || f > 1 ) throw new IllegalArgumentException( "Load factor must be greater than 0 and smaller than or equal to 1" );
+		if ( expected < 0 ) throw new IllegalArgumentException( "The expected number of elements must be nonnegative" );
+
+		this.f = f;
+		
+		n = bigArraySize( expected, f );
+		maxFill = maxFill( n, f );
+		key = KEY_GENERIC_BIG_ARRAY_CAST BIG_ARRAYS.newBigArray( n );
+		used = BooleanBigArrays.newBigArray( n );
+		initMasks();
+	}
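
(A worked example of the sizing rule stated in the class comment — the table becomes the least power of
two greater than expected/f. The helper below reproduces that rule in isolation; the exact rounding
performed inside bigArraySize() is an assumption, and tiny or maximal sizes are not handled.)

	static long tableSizeFor( final long expected, final float f ) {
		final long needed = (long)Math.ceil( expected / (double)f );
		return Long.highestOneBit( needed - 1 ) << 1; // least power of two >= needed
	}
	// tableSizeFor( 1000000, 0.75f ) == 2097152: about 1,333,334 slots are needed,
	// and the next power of two is 2^21.
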
+	 
+	 
+	/** Creates a new hash big set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
+	 *
+	 * @param expected the expected number of elements in the hash big set. 
+	 */
+	 
+	public OPEN_HASH_BIG_SET( final long expected ) {
+		this( expected, DEFAULT_LOAD_FACTOR );
+	}
+
+	/** Creates a new hash big set with initial expected {@link Hash#DEFAULT_INITIAL_SIZE} elements
+	 * and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
+	 */
+	 
+	public OPEN_HASH_BIG_SET() {
+		this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR );
+	} 
+
+	/** Creates a new hash big set copying a given collection.
+	 *
+	 * @param c a {@link Collection} to be copied into the new hash big set. 
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_HASH_BIG_SET( final Collection<? extends KEY_GENERIC_CLASS> c, final float f ) {
+		this( c.size(), f );
+		addAll( c );
+	}
+
+	/** Creates a new hash big set  with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor 
+	 * copying a given collection.
+	 *
+	 * @param c a {@link Collection} to be copied into the new hash big set. 
+	 */
+	 
+	public OPEN_HASH_BIG_SET( final Collection<? extends KEY_GENERIC_CLASS> c ) {
+		this( c, DEFAULT_LOAD_FACTOR );
+	}
+
+	/** Creates a new hash big set copying a given type-specific collection.
+	 *
+	 * @param c a type-specific collection to be copied into the new hash big set. 
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_HASH_BIG_SET( final COLLECTION KEY_EXTENDS_GENERIC c, final float f ) {
+		this( c.size(), f );
+		addAll( c );
+	}
+
+	/** Creates a new hash big set  with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor 
+	 * copying a given type-specific collection.
+	 *
+	 * @param c a type-specific collection to be copied into the new hash big set. 
+	 */
+	 
+	public OPEN_HASH_BIG_SET( final COLLECTION KEY_EXTENDS_GENERIC c ) {
+		this( c, DEFAULT_LOAD_FACTOR );
+	}
+
+	/** Creates a new hash big set using elements provided by a type-specific iterator.
+	 *
+	 * @param i a type-specific iterator whose elements will fill the new hash big set.
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_HASH_BIG_SET( final KEY_ITERATOR KEY_GENERIC i, final float f ) {
+		this( DEFAULT_INITIAL_SIZE, f );
+		while( i.hasNext() ) add( i.NEXT_KEY() );
+	}
+
+	/** Creates a new hash big set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by a type-specific iterator.
+	 *
+	 * @param i a type-specific iterator whose elements will fill the new hash big set.
+	 */
+	 
+	public OPEN_HASH_BIG_SET( final KEY_ITERATOR KEY_GENERIC i ) {
+		this( i, DEFAULT_LOAD_FACTOR );
+	}
+
+
+#if #keys(primitive)
+
+	/** Creates a new hash big set using elements provided by an iterator.
+	 *
+	 * @param i an iterator whose elements will fill the new hash big set.
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_HASH_BIG_SET( final Iterator<?> i, final float f ) {
+		this( ITERATORS.AS_KEY_ITERATOR( i ), f );
+	}
+	/** Creates a new hash big set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by an iterator.
+	 *
+	 * @param i an iterator whose elements will fill the new hash big set.
+	 */
+	 
+	public OPEN_HASH_BIG_SET( final Iterator<?> i ) {
+		this( ITERATORS.AS_KEY_ITERATOR( i ) );
+	}
+
+#endif
+
+
+	/** Creates a new hash big set and fills it with the elements of a given array.
+	 *
+	 * @param a an array whose elements will be used to fill the new hash big set.
+	 * @param offset the first element to use.
+	 * @param length the number of elements to use.
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_HASH_BIG_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length, final float f ) {
+		this( length < 0 ? 0 : length, f );
+		ARRAYS.ensureOffsetLength( a, offset, length );
+		for( int i = 0; i < length; i++ ) add( a[ offset + i ] );
+	}
+
+	/** Creates a new hash big set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor and fills it with the elements of a given array.
+	 *
+	 * @param a an array whose elements will be used to fill the new hash big set.
+	 * @param offset the first element to use.
+	 * @param length the number of elements to use.
+	 */
+	 
+	public OPEN_HASH_BIG_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length ) {
+		this( a, offset, length, DEFAULT_LOAD_FACTOR );
+	}
+
+	/** Creates a new hash big set copying the elements of an array.
+	 *
+	 * @param a an array to be copied into the new hash big set. 
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_HASH_BIG_SET( final KEY_GENERIC_TYPE[] a, final float f ) {
+		this( a, 0, a.length, f );
+	}
+
+	/** Creates a new hash big set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor 
+	 * copying the elements of an array.
+	 *
+	 * @param a an array to be copied into the new hash big set. 
+	 */
+	 
+	public OPEN_HASH_BIG_SET( final KEY_GENERIC_TYPE[] a ) {
+		this( a, DEFAULT_LOAD_FACTOR );
+	}
+
+	public boolean add( final KEY_GENERIC_TYPE k ) {
+		final long h = KEY2LONGHASH( k );
+
+		// The starting point.
+		int displ = (int)( h & segmentMask );
+		int base = (int)( ( h & mask ) >>> BigArrays.SEGMENT_SHIFT );
+
+		// There's always an unused entry.
+		while( used[ base ][ displ ] ) {
+			if ( KEY_EQUALS( key[ base ][ displ ], k ) ) return false;
+			base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ) & baseMask;
+		}
+
+		used[ base ][ displ ] = true;
+		key[ base ][ displ ] = k;
+
+		if ( ++size >= maxFill ) rehash( 2 * n );
+		if ( ASSERTS ) checkTable();
+		return true;
+	}
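
(The probe step used in add() above — and again in remove() and contains() below — advances one slot at a
time, wrapping from the end of one segment of the big array to the beginning of the next. Spelled out, the
one-line update is equivalent to the following illustrative fragment.)

	displ = ( displ + 1 ) & segmentMask;              // next slot inside the current segment
	if ( displ == 0 ) base = ( base + 1 ) & baseMask; // displacement wrapped: move to the next segment
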
+
+	/** Shifts left entries with the specified hash code, starting at the specified position,
+	 * and empties the resulting free entry.
+	 *
+	 * @param pos a starting position.
+	 * @return the position cleared by the shifting process.
+	 */
+	protected final long shiftKeys( long pos ) {
+		// Shift entries with the same hash.
+		long last, slot;
+
+		/*
+		for( int i = 0; i < 10; i++ ) System.err.print( key[ ( t + i ) & mask ] + "(" + (avalanche( (long)KEY2INT( key[ ( t + i ) & mask ] ) ) & mask) + "; " + used[ ( t + i ) & mask ] + ") ");
+		System.err.println();
+		*/
+		for(;;) {
+			pos = ( ( last = pos ) + 1 ) & mask;
+			
+			while( BooleanBigArrays.get( used, pos ) ) {
+				slot = KEY2LONGHASH( BIG_ARRAYS.get( key, pos ) ) & mask;
+				if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
+				pos = ( pos + 1 ) & mask;
+			}
+
+			if ( ! BooleanBigArrays.get( used, pos ) ) break;
+
+			BIG_ARRAYS.set( key, last, BIG_ARRAYS.get( key, pos ) );
+		}
+
+		BooleanBigArrays.set( used, last, false );
+#if #keys(reference)
+		BIG_ARRAYS.set( key, last, null );
+#endif
+		return last;
+	}
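
(Unlike the double-hashed set earlier in this patch, which marks deleted slots as REMOVED, this class
deletes by shifting later entries of the same probe chain back over the freed slot. A standalone rendition
of the same logic on a flat int[] table of power-of-two size follows; it is illustrative only — 0 marks an
empty slot and a key's hash is taken to be the key itself, so the key 0 cannot be stored in this toy
version.)

	static void shiftKeys( final int[] table, int pos ) {
		final int mask = table.length - 1;
		int last;
		for(;;) {
			pos = ( ( last = pos ) + 1 ) & mask;
			// Scan the probe chain for an entry that may legally move back into `last`.
			while( table[ pos ] != 0 ) {
				final int slot = table[ pos ] & mask; // the slot this key originally hashes to
				if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
				pos = ( pos + 1 ) & mask;
			}
			if ( table[ pos ] == 0 ) break; // end of the chain: nothing more to shift
			table[ last ] = table[ pos ];   // shift the entry back and continue from its old slot
		}
		table[ last ] = 0; // the final vacated slot becomes empty
	}
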
+
+	@SuppressWarnings("unchecked")
+	public boolean remove( final KEY_TYPE k ) {
+		final long h = KEY2LONGHASH( k );
+
+		// The starting point.
+		int displ = (int)( h & segmentMask );
+		int base = (int)( ( h & mask ) >>> BigArrays.SEGMENT_SHIFT );
+
+		// There's always an unused entry.
+		while( used[ base ][ displ ] ) {
+			if ( KEY_EQUALS( key[ base ][ displ ], k ) ) {
+				size--;
+				shiftKeys( base * (long)BigArrays.SEGMENT_SIZE + displ );
+				if ( ASSERTS ) checkTable();
+				return true;
+			}
+			base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ) & baseMask;
+		}
+
+		return false;
+	}
+	 
+	@SuppressWarnings("unchecked")
+	public boolean contains( final KEY_TYPE k ) {
+		final long h = KEY2LONGHASH( k );
+
+		// The starting point.
+		int displ = (int)( h & segmentMask );
+		int base = (int)( ( h & mask ) >>> BigArrays.SEGMENT_SHIFT );
+
+		// There's always an unused entry.
+		while( used[ base ][ displ ] ) {
+			if ( KEY_EQUALS( key[ base ][ displ ], k ) ) return true; 
+			base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ) & baseMask;
+		}
+
+		return false;
+	}
+
+#if #keyclass(Object)
+	/** Returns the element of this set that is equal to the given key, or <code>null</code>.
+	 * @return the element of this set that is equal to the given key, or <code>null</code>.
+	 */
+	public K get( final KEY_TYPE k ) {
+		final long h = KEY2LONGHASH( k );
+
+		// The starting point.
+		int displ = (int)( h & segmentMask );
+		int base = (int)( ( h & mask ) >>> BigArrays.SEGMENT_SHIFT );
+
+		// There's always an unused entry.
+		while( used[ base ][ displ ] ) {
+			if ( KEY_EQUALS( key[ base ][ displ ], k ) ) return key[ base ][ displ ]; 
+			base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ) & baseMask;
+		}
+
+		return null;
+	}
+#endif
+
+	/* Removes all elements from this set.
+	 *
+	 * <P>To increase object reuse, this method does not change the table size.
+	 * If you want to reduce the table size, you must use {@link #trim(long)}.
+	 *
+	 */
+
+	public void clear() {
+		if ( size == 0 ) return;
+		size = 0;
+		BooleanBigArrays.fill( used, false );
+#if #keys(reference)
+		ObjectBigArrays.fill( key, null );
+#endif
+	}
+
+
+
+	/** An iterator over a hash big set. */
+
+	private class SetIterator extends KEY_ABSTRACT_ITERATOR KEY_GENERIC {
+		/** The base of the next entry to be returned, if positive or zero. If negative, the next entry to be
+			returned, if any, is that of index -base -2 from the {@link #wrapped} list. */
+		int base;
+		/** The displacement of the next entry to be returned. */
+		int displ;
+		/** The base of the last entry that has been returned. It is -1 if either
+			we did not return an entry yet, or the last returned entry has been removed. */
+		int lastBase;
+		/** The displacement of the last entry that has been returned. It is undefined if either
+			we did not return an entry yet, or the last returned entry has been removed. */        
+		int lastDispl;
+		/** A downward counter measuring how many entries must still be returned. */
+		long c = size;
+		/** A lazily allocated list containing elements that have wrapped around the table because of removals; such elements
+			would not be enumerated (other elements would usually be enumerated twice in their place). */
+		ARRAY_LIST KEY_GENERIC wrapped;
+
+		{ 
+			base = key.length;
+			lastBase = -1;
+			final boolean used[][] = OPEN_HASH_BIG_SET.this.used;
+			if ( c != 0 ) do 
+				if ( displ-- == 0 ) {
+					base--;
+				   	displ = (int)mask;
+				}
+			while( ! used[ base ][ displ ] );
+		}
+
+		public boolean hasNext() {
+			return c != 0;
+		}
+
+		public KEY_GENERIC_TYPE NEXT_KEY() {
+			if ( ! hasNext() ) throw new NoSuchElementException();
+
+			c--;
+			// We are just enumerating elements from the wrapped list.
+			if ( base < 0 ) return wrapped.GET_KEY( - ( lastBase = --base ) - 2  );
+
+			final KEY_GENERIC_TYPE retVal = key[ lastBase = base ][ lastDispl = displ ];
+			
+			if ( c != 0 ) {
+				final boolean used[][] = OPEN_HASH_BIG_SET.this.used;
+				do 
+					if ( displ-- == 0 ) {
+						if ( base-- == 0 ) break;
+				   		displ = (int)mask;
+					}
+				while( ! used[ base ][ displ ] );
+				// When we get here, if base < 0 there are no more elements to be enumerated by scanning, but wrapped might be nonempty.
+			}
+			
+			return retVal;
+		}
+
+		/** Shifts left entries with the specified hash code, starting at the specified position,
+		 * and empties the resulting free entry. If any entry wraps around the table, instantiates
+		 * lazily {@link #wrapped} and stores the entry.
+		 *
+		 * @param pos a starting position.
+		 * @return the position cleared by the shifting process.
+		 */
+		protected final long shiftKeys( long pos ) {
+			// Shift entries with the same hash.
+			long last, slot;
+	
+			/*
+			for( int i = 0; i < 10; i++ ) System.err.print( key[ ( t + i ) & mask ] + "(" + (avalanche( (long)KEY2INT( key[ ( t + i ) & mask ] ) ) & mask) + "; " + used[ ( t + i ) & mask ] + ") ");
+			System.err.println();
+			*/
+			for(;;) {
+				pos = ( ( last = pos ) + 1 ) & mask;
+				
+				while( BooleanBigArrays.get( used, pos ) ) {
+					slot = KEY2LONGHASH( BIG_ARRAYS.get( key, pos ) ) & mask;
+					if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
+					pos = ( pos + 1 ) & mask;
+				}
+	
+				if ( ! BooleanBigArrays.get( used, pos ) ) break;
+				if ( pos < last ) {
+					// Wrapped entry.
+					if ( wrapped == null ) wrapped = new ARRAY_LIST KEY_GENERIC();
+					wrapped.add( BIG_ARRAYS.get( key, pos ) );
+				}
+
+				BIG_ARRAYS.set( key, last, BIG_ARRAYS.get( key, pos ) );
+			}
+	
+			BooleanBigArrays.set( used, last, false );
+	#if #keys(reference)
+			BIG_ARRAYS.set( key, last, null );
+	#endif
+			return last;
+		}
+
+		@SuppressWarnings("unchecked")
+		public void remove() {
+			if ( lastBase == -1 ) throw new IllegalStateException();
+			if ( base < -1 ) {
+				// We're removing wrapped entries.
+#if #keys(reference)
+				OPEN_HASH_BIG_SET.this.remove( wrapped.set( - base - 2, null ) );
+#else
+				OPEN_HASH_BIG_SET.this.remove( wrapped.GET_KEY( - base - 2 ) );
+#endif
+				lastBase = -1;
+				return;
+			}
+			size--;
+			if ( shiftKeys( lastBase * (long)BigArrays.SEGMENT_SIZE + lastDispl ) == base * (long)BigArrays.SEGMENT_SIZE + displ && c > 0 ) {
+				c++;
+				NEXT_KEY();
+			}
+			lastBase = -1; // You can no longer remove this entry.
+
+			if ( ASSERTS ) checkTable();
+		}
+	}
+
+	public KEY_ITERATOR KEY_GENERIC iterator() {
+		return new SetIterator();
+	}
+
+
+	/** A no-op for backward compatibility. The tables implemented by
+	 * this class never need rehashing.
+	 *
+	 * <P>If you need to reduce the table size to fit exactly
+	 * this set, use {@link #trim()}.
+	 *
+	 * @return true.
+	 * @see #trim()
+	 * @deprecated A no-op.
+	 */
+
+	@Deprecated
+	public boolean rehash() {
+		return true;
+	}
+
+	/** Rehashes this set, making the table as small as possible.
+	 * 
+	 * <P>This method rehashes the table to the smallest size satisfying the
+	 * load factor. It can be used when the set will not be changed anymore, so
+	 * as to optimize access speed and size.
+	 *
+	 * <P>If the table size is already the minimum possible, this method
+	 * does nothing.
+	 *
+	 * @return true if there was enough memory to trim the set.
+	 * @see #trim(long)
+	 */
+
+	public boolean trim() {
+		final long l = bigArraySize( size, f );
+		if ( l >= n ) return true;
+		try {
+			rehash( l );
+		}
+		catch(OutOfMemoryError cantDoIt) { return false; }
+		return true;
+	}
+
+	/** Rehashes this set if the table is too large.
+	 * 
+	 * <P>Let <var>N</var> be the smallest table size that can hold
+	 * <code>max(n,{@link #size64()})</code> entries, still satisfying the load factor. If the current
+	 * table size is smaller than or equal to <var>N</var>, this method does
+	 * nothing. Otherwise, it rehashes this set in a table of size
+	 * <var>N</var>.
+	 *
+	 * <P>This method is useful when reusing sets.  {@linkplain #clear() Clearing a
+	 * set} leaves the table size untouched. If you are reusing a set
+	 * many times, you can call this method with a typical
+	 * size to avoid keeping around a very large table just
+	 * because of a few large transient sets.
+	 *
+	 * @param n the threshold for the trimming.
+	 * @return true if there was enough memory to trim the set.
+	 * @see #trim()
+	 */
+
+	public boolean trim( final long n ) {
+		final long l = bigArraySize( n, f );
+		if ( this.n <= l ) return true;
+		try {
+			rehash( l );
+		}
+		catch( OutOfMemoryError cantDoIt ) { return false; }
+		return true;
+	}
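+
+	/* Illustrative usage (not part of the generated class): a minimal sketch, assuming the
+	 * long-keyed variant generated from this driver is named LongOpenHashBigSet, of how the two
+	 * trimming methods above can be combined when a set is filled once and then reused.
+	 *
+	 *   LongOpenHashBigSet s = new LongOpenHashBigSet();
+	 *   for( long i = 0; i < 1000000; i++ ) s.add( i * i );
+	 *   s.trim();        // shrink the table to the smallest size allowed by the load factor
+	 *
+	 *   s.clear();       // clearing never shrinks the table...
+	 *   s.trim( 100 );   // ...so trim to a typical size before reusing the set
+	 */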
+
+	/** Resizes the set.
+	 *
+	 * <P>This method implements the basic rehashing strategy, and may be
+	 * overridden by subclasses implementing different rehashing strategies (e.g.,
+	 * disk-based rehashing). However, you should not override this method
+	 * unless you understand the internal workings of this class.
+	 *
+	 * @param newN the new size
+	 */
+
+	@SuppressWarnings("unchecked")
+	protected void rehash( final long newN ) {
+		final boolean used[][] = this.used;
+		final KEY_GENERIC_TYPE key[][] = this.key;
+		final boolean newUsed[][] = BooleanBigArrays.newBigArray( newN );
+		final KEY_GENERIC_TYPE newKey[][] = KEY_GENERIC_BIG_ARRAY_CAST BIG_ARRAYS.newBigArray( newN );
+		final long newMask = newN - 1;
+		final int newSegmentMask = newKey[ 0 ].length - 1;
+		final int newBaseMask = newKey.length - 1;		
+
+		int base = 0, displ = 0;
+		long h;
+		KEY_GENERIC_TYPE k;
+
+		for( long i = size; i-- != 0; ) {
+
+			while( ! used[ base ][ displ ] ) base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) );
+
+			k = key[ base ][ displ ];
+			h = KEY2LONGHASH( k );
+
+			// The starting point.
+			int d = (int)( h & newSegmentMask );
+			int b = (int)( ( h & newMask ) >>> BigArrays.SEGMENT_SHIFT );
+
+			while( newUsed[ b ][ d ] ) b = ( b + ( ( d = ( d + 1 ) & newSegmentMask ) == 0 ? 1 : 0 ) ) & newBaseMask;
+
+			newUsed[ b ][ d ] = true;
+			newKey[ b ][ d ] = k;
+
+			base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) );
+		}
+
+		this.n = newN;
+		this.key = newKey;
+		this.used = newUsed;
+		initMasks();
+		maxFill = maxFill( n, f );
+	}
+
+	@Deprecated
+	public int size() {
+		return (int)Math.min( Integer.MAX_VALUE, size );
+	}
+
+	public long size64() {
+		return size;
+	}
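+
+	/* Illustrative note (not part of the generated class): since this is a big set, the number of
+	 * elements may exceed Integer.MAX_VALUE, so the deprecated size() above saturates whereas
+	 * size64() is exact. A minimal sketch, assuming the generated LongOpenHashBigSet variant:
+	 *
+	 *   LongOpenHashBigSet s = ...;  // possibly more than Integer.MAX_VALUE elements
+	 *   long exact = s.size64();     // exact number of elements
+	 *   int capped = s.size();       // min( Integer.MAX_VALUE, size64() ), kept for compatibility
+	 */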
+
+	public boolean isEmpty() {
+		return size == 0;
+	}
+
+
+
+	/** Returns a deep copy of this big set. 
+	 *
+	 * <P>This method performs a deep copy of this big hash set; the data stored in the
+	 * set, however, is not cloned. Note that this makes a difference only for object keys.
+	 *
+	 *  @return a deep copy of this big set.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public OPEN_HASH_BIG_SET KEY_GENERIC clone() {
+		OPEN_HASH_BIG_SET KEY_GENERIC c;
+		try {
+			c = (OPEN_HASH_BIG_SET KEY_GENERIC)super.clone();
+		}
+		catch(CloneNotSupportedException cantHappen) {
+			throw new InternalError();
+		}
+		c.key = BIG_ARRAYS.copy( key );
+		c.used = BooleanBigArrays.copy( used );
+		return c;
+	}
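+
+	/* Illustrative note (not part of the generated class): cloning copies the hash-table structure
+	 * but not the stored elements, which matters only for object keys. A minimal sketch, assuming
+	 * the generated ObjectOpenHashBigSet variant:
+	 *
+	 *   ObjectOpenHashBigSet<StringBuilder> s = new ObjectOpenHashBigSet<StringBuilder>();
+	 *   StringBuilder b = new StringBuilder( "x" );
+	 *   s.add( b );
+	 *   ObjectOpenHashBigSet<StringBuilder> t = s.clone();
+	 *   b.append( "y" );   // the element is shared, so the change is visible through both sets
+	 *   t.remove( b );     // structural changes, however, remain independent
+	 */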
+
+	/** Returns a hash code for this set.
+	 *
+	 * This method overrides the generic method provided by the superclass. 
+	 * Since <code>equals()</code> is not overridden, it is important
+	 * that the value returned by this method is the same value as
+	 * the one returned by the overridden method.
+	 *
+	 * @return a hash code for this set.
+	 */
+
+
+	public int hashCode() {
+		final boolean used[][] = this.used;
+		final KEY_GENERIC_TYPE key[][] = this.key;
+		int h = 0;
+		int base = 0, displ = 0;
+		for( long j = size; j-- != 0; ) {
+			while( ! used[ base ][ displ ] ) base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) );
+#if #keys(reference)
+			if ( this != key[ base ][ displ ] )
+#endif
+				h += KEY2JAVAHASH( key[ base ][ displ ] );
+			base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) );
+		}
+		return h;
+	}
+
+
+	private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
+		final KEY_ITERATOR KEY_GENERIC i = iterator();
+		s.defaultWriteObject();
+		for( long j = size; j-- != 0; ) s.WRITE_KEY( i.NEXT_KEY() );
+	}
+
+
+	@SuppressWarnings("unchecked")
+	private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
+		s.defaultReadObject();
+
+		n = bigArraySize( size, f );
+		maxFill = maxFill( n, f );
+		
+		final KEY_GENERIC_TYPE[][] key = this.key = KEY_GENERIC_BIG_ARRAY_CAST BIG_ARRAYS.newBigArray( n );
+		final boolean used[][] = this.used = BooleanBigArrays.newBigArray( n );
+	
+		initMasks();
+
+		long h;
+		KEY_GENERIC_TYPE k;
+		int base, displ;
+
+		for( long i = size; i-- != 0; ) {
+
+			k = KEY_GENERIC_CAST s.READ_KEY();
+			h = KEY2LONGHASH( k );
+
+			base = (int)( ( h & mask ) >>> BigArrays.SEGMENT_SHIFT );
+			displ = (int)( h & segmentMask );
+			
+			while( used[ base ][ displ ] ) base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ) & baseMask;
+
+			used[ base ][ displ ] = true;
+			key[ base ][ displ ] = k;
+		}
+
+		if ( ASSERTS ) checkTable();
+	}
+
+
+#ifdef ASSERTS_CODE
+	private void checkTable() {
+		final boolean[][] used = this.used;
+		assert ( n & -n ) == n : "Table length is not a power of two: " + n;
+		assert n == BIG_ARRAYS.length( key );
+		assert n == BooleanBigArrays.length( used );
+		long n = this.n;
+		while( n-- != 0 ) 
+			if ( BooleanBigArrays.get( used, n ) && ! contains( BIG_ARRAYS.get( key, n ) ) ) 
+				throw new AssertionError( "Hash table has key " + BIG_ARRAYS.get( key, n ) + " marked as occupied, but the key does not belong to the table" );
+
+#if #keys(primitive)
+		java.util.HashSet<KEY_GENERIC_CLASS> s = new java.util.HashSet<KEY_GENERIC_CLASS> ();
+#else
+		java.util.HashSet<Object> s = new java.util.HashSet<Object>();
+#endif
+		
+		for( long i = size(); i-- != 0; )
+			if ( BooleanBigArrays.get( used, i ) && ! s.add( BIG_ARRAYS.get( key, i ) ) ) throw new AssertionError( "Key " + BIG_ARRAYS.get( key, i ) + " appears twice" );
+
+	}
+#else
+	private void checkTable() {}
+#endif
+
+#ifdef TEST
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif #keyclass(Object)
+		return Integer.toBinaryString( r.nextInt() );
+#else
+		return new java.io.Serializable() {};
+#endif
+	}
+
+
+	private static final class ArrayComparator implements java.util.Comparator {
+		public int compare( Object a, Object b ) {
+			byte[] aa = (byte[])a;
+			byte[] bb = (byte[])b;
+			int length = Math.min( aa.length, bb.length );
+			for( int i = 0; i < length; i++ ) {
+				if ( aa[ i ] < bb[ i ] ) return -1;
+				if ( aa[ i ] > bb[ i ] ) return 1;
+			}
+			return aa.length == bb.length ? 0 : ( aa.length < bb.length ? -1 : 1 );
+		}
+	}
+
+	private static final class MockSet extends java.util.TreeSet {
+		private java.util.List list = new java.util.ArrayList();
+
+		public MockSet( java.util.Comparator c ) { super( c ); }
+
+		public boolean add( Object k ) {
+			if ( ! contains( k ) ) list.add( k );
+			return super.add( k );
+		}
+
+		public boolean addAll( Collection c ) {
+			java.util.Iterator i = c.iterator();
+			boolean result = false;
+			while( i.hasNext() ) result |= add( i.next() );
+			return result;
+		}
+
+		public boolean removeAll( Collection c ) {
+			java.util.Iterator i = c.iterator();
+			boolean result = false;
+			while( i.hasNext() ) result |= remove( i.next() );
+			return result;
+		}
+
+		public boolean remove( Object k ) {
+			if ( contains( k ) ) {
+				int i = list.size();
+				while( i-- != 0 ) if ( comparator().compare( list.get( i ), k ) == 0 ) {
+					list.remove( i );
+					break;
+				}
+			}
+			return super.remove( k );
+		}
+
+		private void justRemove( Object k ) { super.remove( k ); }
+
+		public java.util.Iterator iterator() {
+			return new java.util.Iterator() {
+					final java.util.Iterator iterator = list.iterator();
+					Object curr;
+					public Object next() { return curr = iterator.next(); }
+					public boolean hasNext() { return iterator.hasNext(); }
+					public void remove() { 
+						justRemove( curr );
+						iterator.remove(); 
+					}
+				};
+		}
+	}
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition fp = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, fp ).toString();
+	}
+
+	private static void speedTest( int n, float f, boolean comp ) {
+		int i, j;
+		OPEN_HASH_BIG_SET m;
+		java.util.HashSet t;
+
+		KEY_TYPE k[] = new KEY_TYPE[n];
+		KEY_TYPE nk[] = new KEY_TYPE[n];
+		long ms;
+
+		for( i = 0; i < n; i++ ) {
+			k[i] = genKey();
+			nk[i] = genKey();
+		}
+		  
+		double totAdd = 0, totYes = 0, totNo = 0, totIter = 0, totRemYes = 0, totRemNo = 0, d;
+
+		if ( comp ) { for( j = 0; j < 20; j++ ) {
+
+			t = new java.util.HashSet( 16 );
+
+			/* We add pairs to t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.add( KEY2OBJ( k[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totAdd += d; 				
+			System.out.print("Add: " + format( d ) +" K/s " );
+
+			/* We check for pairs in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.contains( KEY2OBJ( k[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totYes += d; 				
+			System.out.print("Yes: " + format( d ) +" K/s " );
+
+			/* We check for pairs not in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.contains( KEY2OBJ( nk[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totNo += d; 				
+			System.out.print("No: " + format( d ) +" K/s " );
+
+			/* We iterate on t. */
+			ms = System.currentTimeMillis();
+			for( java.util.Iterator it = t.iterator(); it.hasNext(); it.next() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIter += d; 				
+			System.out.print("Iter: " + format( d ) +" K/s " );
+				
+			/* We delete pairs not in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.remove( KEY2OBJ( nk[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemNo += d; 				
+			System.out.print("RemNo: " + format( d ) +" K/s " );
+				
+			/* We delete pairs in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.remove( KEY2OBJ( k[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemYes += d; 				
+			System.out.print("RemYes: " + format( d ) +" K/s " );
+				
+			System.out.println();
+		}
+
+		System.out.println();
+		System.out.println( "java.util Add: " + format( totAdd/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s Iter: " + format( totIter/(j-3) ) + " K/s RemNo: " + format( totRemNo/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s" );
+
+		System.out.println();
+
+		totAdd = totYes = totNo = totIter = totRemYes = totRemNo = 0;
+		}
+
+		for( j = 0; j < 20; j++ ) {
+
+			m = new OPEN_HASH_BIG_SET( 16, f );
+
+			/* We add pairs to m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.add( k[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totAdd += d; 				
+			System.out.print("Add: " + format( d ) +" K/s " );
+
+			/* We check for pairs in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.contains( k[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totYes += d; 				
+			System.out.print("Yes: " + format( d ) +" K/s " );
+
+			/* We check for pairs not in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.contains( nk[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totNo += d; 				
+			System.out.print("No: " + format( d ) +" K/s " );
+
+			/* We iterate on m. */
+			ms = System.currentTimeMillis();
+			for( KEY_ITERATOR it = (KEY_ITERATOR)m.iterator(); it.hasNext(); it.NEXT_KEY() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIter += d; 	 
+			System.out.print("Iter: " + format( d ) +" K/s " );
+
+			/* We delete pairs not in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.remove( nk[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemNo += d; 	
+			System.out.print("RemNo: " + format( d ) +" K/s " );
+
+			/* We delete pairs in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.remove( k[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemYes += d; 				
+			System.out.print("RemYes: " + format( d ) +" K/s " );	 
+
+			System.out.println();
+		}
+
+
+		System.out.println();
+		System.out.println( "fastutil  Add: " + format( totAdd/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s Iter: " + format( totIter/(j-3) ) + " K/s RemNo: " + format( totRemNo/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s" );
+
+		System.out.println();
+	}
+
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+
+	private static void printProbes( OPEN_HASH_BIG_SET m ) {
+		long totProbes = 0;
+		double totSquareProbes = 0;
+		int maxProbes = 0;	
+		final double f = (double)m.size / m.n;
+		for( int i = 0, c = 0; i < m.n; i++ ) {
+			if ( BooleanBigArrays.get( m.used, i ) ) c++;
+			else {
+				if ( c != 0 ) {
+					final long p = ( c + 1 ) * ( c + 2 ) / 2;
+					totProbes += p;
+					totSquareProbes += (double)p * p;
+				}
+				maxProbes = Math.max( c, maxProbes );
+				c = 0;
+				totProbes++;
+				totSquareProbes++;
+			}
+		}
+
+		final double expected = (double)totProbes / m.n;
+		System.err.println( "Expected probes: " + ( 
+			3 * Math.sqrt( 3 ) * ( f / ( ( 1 - f ) * ( 1 - f ) ) ) + 4 / ( 9 * f ) - 1
+		) + "; actual: " + expected + "; stddev: " + Math.sqrt( totSquareProbes / m.n - expected * expected )  + "; max probes: " + maxProbes );
+	}
+	private static void test( int n, float f ) {
+		int c;
+		OPEN_HASH_BIG_SET m = new OPEN_HASH_BIG_SET(Hash.DEFAULT_INITIAL_SIZE, f);
+		java.util.Set t = new java.util.HashSet();
+
+		/* First of all, we fill t with random data. */
+
+		for(int i=0; i<f * n;  i++ ) t.add(KEY2OBJ(genKey()));
+		  
+		/* Now we add to m the same data */
+		  
+		m.addAll(t); 
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after insertion");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after insertion");
+		printProbes( m );
+
+		/* Now we check that m actually holds that data. */
+		  
+		for(java.util.Iterator i=t.iterator(); i.hasNext();  ) {
+			Object e = i.next();
+			if (!m.contains(e)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+e+") after insertion (iterating on t)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+
+		c = 0;		  
+		for(java.util.Iterator i=m.iterator(); i.hasNext();  ) {
+			Object e = i.next();
+			c++;
+			if (!t.contains(e)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+e+") after insertion (iterating on m)");
+				System.exit( 1 );
+			}
+		}
+
+		if ( c != t.size() ) {
+			System.out.println("Error (" + seed + "): m has only " + c + " keys instead of " + t.size() + " after insertion (iterating on m)");
+			System.exit( 1 );
+		}
+		/* Now we check that inquiries about random data give the same answer in m and t. For
+		   m we use the polymorphic method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+			if (m.contains(T) != t.contains(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence in keys between t and m (polymorphic method)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Again, we check that inquiries about random data give the same answer in m and t, but
+		   for m we use the standard method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+			if (m.contains(KEY2OBJ(T)) != t.contains(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence between t and m (standard method)");
+				System.exit( 1 );
+			}
+		}
+
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+			if (m.add(KEY2OBJ(T)) != t.add(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence in add() between t and m");
+				System.exit( 1 );
+			}
+			T = genKey();
+			if (m.remove(KEY2OBJ(T)) != t.remove(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence in remove() between t and m");
+				System.exit( 1 );
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after removal");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after removal");
+
+		/* Now we check that m actually holds that data. */
+		  
+		for(java.util.Iterator i=t.iterator(); i.hasNext();  ) {
+			Object e = i.next();
+			if (!m.contains(e)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+e+") after removal (iterating on t)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.iterator(); i.hasNext();  ) {
+			Object e = i.next();
+			if (!t.contains(e)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+e+") after removal (iterating on m)");
+				System.exit( 1 );
+			}
+		}
+
+		printProbes( m );
+
+		/* Now we make m into an array, make it again a set and check it is OK. */
+		KEY_TYPE a[] = m.TO_KEY_ARRAY();
+		  
+		if (!new OPEN_HASH_BIG_SET(a).equals(m))
+			System.out.println("Error (" + seed + "): toArray() output (or array-based constructor) is not OK");
+
+		/* Now we check cloning. */
+
+		ensure( m.equals( ((OPEN_HASH_BIG_SET)m).clone() ), "Error (" + seed + "): m does not equal m.clone()" );
+		ensure( ((OPEN_HASH_BIG_SET)m).clone().equals( m ), "Error (" + seed + "): m.clone() does not equal m" );
+
+		int h = m.hashCode();
+
+		/* Now we save and read m. */
+
+		try {
+			java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test");
+			java.io.OutputStream os = new java.io.FileOutputStream(ff);
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os);
+				
+			oos.writeObject(m);
+			oos.close();
+				
+			java.io.InputStream is = new java.io.FileInputStream(ff);
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is);
+				
+			m = (OPEN_HASH_BIG_SET)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch(Exception e) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+
+#if !#keyclass(Reference)
+		if (m.hashCode() != h) System.out.println("Error (" + seed + "): hashCode() changed after save/read");
+
+		printProbes( m );
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.iterator(); i.hasNext();  ) {
+			Object e = i.next();
+			if (!t.contains(e)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+e+") after save/read");
+				System.exit( 1 );
+			}
+		}
+#else
+		m.clear();
+		m.addAll( t );
+#endif
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+			if (m.add(KEY2OBJ(T)) != t.add(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence in add() between t and m after save/read");
+				System.exit( 1 );
+			}
+			T = genKey();
+			if (m.remove(KEY2OBJ(T)) != t.remove(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence in remove() between t and m after save/read");
+				System.exit( 1 );
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after post-save/read removal");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after post-save/read removal");
+
+
+		/* Now we take out of m everything, and check that it is empty. */
+
+		for(java.util.Iterator i=m.iterator(); i.hasNext(); ) { i.next(); i.remove();} 
+
+		if (!m.isEmpty())  {
+			System.out.println("Error (" + seed + "): m is not empty (as it should be)");
+			System.exit( 1 );
+		}
+
+#if #keyclass(Integer) || #keyclass(Long)
+		m = new OPEN_HASH_BIG_SET(n, f);
+		t.clear();
+		int x;
+
+		/* Now we torture-test the hash table. This part is implemented only for integers and longs. */
+
+		int p = m.used.length;
+
+		for(int i=0; i<p; i++) {
+			for (int j=0; j<20; j++) {
+				m.add(i+(r.nextInt() % 10)*p);
+				m.remove(i+(r.nextInt() % 10)*p);
+			}
+
+			for (int j=-10; j<10; j++) m.remove(i+j*p);
+		}
+		  
+		t.addAll(m);
+
+		/* Now all table entries are REMOVED. */
+ 
+		int k = 0;
+		for(int i=0; i<(p*f)/10; i++) {
+			for (int j=0; j<10; j++) {
+				k++;
+				x = i+(r.nextInt() % 10)*p;
+				if (m.add(x) != t.add(KEY2OBJ(x)))
+					System.out.println("Error (" + seed + "): m and t differ on a key during torture-test insertion.");
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after torture-test insertion");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after torture-test insertion");
+
+		for(int i=0; i<(p*f)/10; i++) {
+			for (int j=0; j<10; j++) {
+				x = i+(r.nextInt() % 10)*p;
+				if (m.remove(x) != t.remove(KEY2OBJ(x)))
+					System.out.println("Error (" + seed + "): m and t differ on a key during torture-test removal.");
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after torture-test removal");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after torture-test removal");
+
+		if (!m.equals(m.clone())) System.out.println("Error (" + seed + "): !m.equals(m.clone()) after torture-test removal");
+		if (!((OPEN_HASH_BIG_SET)m.clone()).equals(m)) System.out.println("Error (" + seed + "): !m.clone().equals(m) after torture-test removal");
+
+		m.rehash();
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after rehash()");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after rehash()");
+
+#endif
+
+		System.out.println("Test OK");
+		return;
+	}
+
+
+	public static void main( String args[] ) {
+		float f = Hash.DEFAULT_LOAD_FACTOR;
+		int n  = Integer.parseInt(args[1]);
+		if (args.length>2) f = Float.parseFloat(args[2]);
+		if ( args.length > 3 ) r = new java.util.Random( seed = Long.parseLong( args[ 3 ] ) );
+		  
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, f, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n, f);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+
+#endif
+
+}
diff --git a/drv/OpenHashMap.drv b/drv/OpenHashMap.drv
new file mode 100644
index 0000000..be2b9ee
--- /dev/null
+++ b/drv/OpenHashMap.drv
@@ -0,0 +1,3061 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.Hash;
+import it.unimi.dsi.fastutil.HashCommon;
+import it.unimi.dsi.fastutil.booleans.BooleanArrays;
+import static it.unimi.dsi.fastutil.HashCommon.arraySize;
+import static it.unimi.dsi.fastutil.HashCommon.maxFill;
+
+import java.util.Map;
+import java.util.NoSuchElementException;
+
+import VALUE_PACKAGE.VALUE_COLLECTION;
+import VALUE_PACKAGE.VALUE_ABSTRACT_COLLECTION;
+
+#if #values(primitive) || #keys(primitive) && #valueclass(Object)
+import VALUE_PACKAGE.VALUE_ITERATOR;
+#endif
+
+#if #keys(reference) || #values(reference)
+import it.unimi.dsi.fastutil.objects.ObjectArrays;
+#endif
+
+
+#ifdef Linked
+
+import java.util.Comparator;
+
+#if #keys(reference)
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.SortedSet;
+#endif
+
+#if #values(primitive)
+import VALUE_PACKAGE.VALUE_LIST_ITERATOR;
+#endif
+
+#if #keys(primitive) && #valueclass(Reference)
+import it.unimi.dsi.fastutil.objects.ObjectIterator;
+#endif
+
+import it.unimi.dsi.fastutil.objects.AbstractObjectSortedSet;
+import it.unimi.dsi.fastutil.objects.ObjectListIterator;
+import it.unimi.dsi.fastutil.objects.ObjectBidirectionalIterator;
+import it.unimi.dsi.fastutil.objects.ObjectSortedSet;
+
+#else
+
+import it.unimi.dsi.fastutil.objects.AbstractObjectSet;
+
+#if #keys(primitive) && ! #valueclass(Object)
+import it.unimi.dsi.fastutil.objects.ObjectIterator;
+#endif
+
+#endif
+
+
+#ifdef Linked
+/**  A type-specific linked hash map with a fast, small-footprint implementation.
+ *
+ * <P>Instances of this class use a hash table to represent a map. The table is
+ * enlarged as needed by doubling its size when new entries are created, but it is <em>never</em> made
+ * smaller (even on a {@link #clear()}). A family of {@linkplain #trim() trimming
+ * methods} lets you control the size of the table; this is particularly useful
+ * if you reuse instances of this class.
+ *
+ * <P>Iterators generated by this map will enumerate pairs in the same order in which they
+ * have been added to the map (addition of pairs whose key is already present 
+ * in the map does not change the iteration order). Note that this order has nothing in common with the natural
+ * order of the keys. The order is kept by means of a doubly linked list, represented
+ * <i>via</i> an array of longs parallel to the table.
+ *
+ * <P>This class implements the interface of a sorted map, so as to allow easy
+ * access to the iteration order: for instance, you can get the first key
+ * in iteration order with {@link #firstKey()} without having to create an
+ * iterator; however, this class partially violates the {@link java.util.SortedMap}
+ * contract because all submap methods throw an exception and {@link
+ * #comparator()} always returns <code>null</code>.
+ *
+ * <p>Additional methods, such as <code>getAndMoveToFirst()</code>, make it easy
+ * to use instances of this class as a cache (e.g., with LRU policy).
+ *
+ * <P>The iterators provided by the views of this class are type-specific
+ * {@linkplain java.util.ListIterator list iterators}, and can be started at any
+ * element <em>which is a key of the map</em>; otherwise, a
+ * {@link NoSuchElementException} will be thrown.
+ * If, however, the provided element is not the first or last key in the
+ * set, the first access to the list index will require linear time, as in the worst case
+ * the entire key set must be scanned in iteration order to retrieve the positional
+ * index of the starting key. If you use just the methods of a type-specific {@link it.unimi.dsi.fastutil.BidirectionalIterator},
+ * however, all operations will be performed in constant time.
+ *
+ * @see Hash
+ * @see HashCommon
+ */
+
+public class OPEN_HASH_MAP KEY_VALUE_GENERIC extends ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable, Hash {
+
+#else
+
+#ifdef Custom
+
+/** A type-specific hash map with a fast, small-footprint implementation whose {@linkplain it.unimi.dsi.fastutil.Hash.Strategy hashing strategy}
+ * is specified at creation time.
+ *
+ * <P>Instances of this class use a hash table to represent a map. The table is
+ * enlarged as needed by doubling its size when new entries are created, but it is <em>never</em> made
+ * smaller (even on a {@link #clear()}). A family of {@linkplain #trim() trimming
+ * methods} lets you control the size of the table; this is particularly useful
+ * if you reuse instances of this class.
+ *
+ * <p><strong>Warning:</strong> The implementation of this class has significantly
+ * changed in <code>fastutil</code> 6.1.0. Please read the
+ * comments about this issue in the section “Faster Hash Tables” of the <a href="../../../../../overview-summary.html">overview</a>.
+ *
+ * @see Hash
+ * @see HashCommon
+ */
+
+public class OPEN_HASH_MAP KEY_VALUE_GENERIC extends ABSTRACT_MAP KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable, Hash {
+
+#else
+
+/** A type-specific hash map with a fast, small-footprint implementation.
+ *
+ * <P>Instances of this class use a hash table to represent a map. The table is
+ * enlarged as needed by doubling its size when new entries are created, but it is <em>never</em> made
+ * smaller (even on a {@link #clear()}). A family of {@linkplain #trim() trimming
+ * methods} lets you control the size of the table; this is particularly useful
+ * if you reuse instances of this class.
+ *
+ * <p><strong>Warning:</strong> The implementation of this class has significantly
+ * changed in <code>fastutil</code> 6.1.0. Please read the
+ * comments about this issue in the section “Faster Hash Tables” of the <a href="../../../../../overview-summary.html">overview</a>.
+ *
+ * @see Hash
+ * @see HashCommon
+ */
+
+public class OPEN_HASH_MAP KEY_VALUE_GENERIC extends ABSTRACT_MAP KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable, Hash {
+
+#endif
+
+#endif
+
+    private static final long serialVersionUID = 0L;
+	private static final boolean ASSERTS = ASSERTS_VALUE;
+
+	/** The array of keys. */
+	protected transient KEY_GENERIC_TYPE key[];
+
+	/** The array of values. */
+	protected transient VALUE_GENERIC_TYPE value[];
+	 
+	/** The array telling whether a position is used. */
+	protected transient boolean used[];
+
+	/** The acceptable load factor. */
+	protected final float f;
+	 
+	/** The current table size. */
+	protected transient int n;
+
+	/** Threshold after which we rehash. It must be the table size times {@link #f}. */
+	protected transient int maxFill;
+
+	/** The mask for wrapping a position counter. */
+	protected transient int mask;
+
+	/** Number of entries in the set. */
+	protected int size;
+
+#ifdef Linked
+	/** Cached set of entries. */
+	protected transient volatile FastSortedEntrySet KEY_VALUE_GENERIC entries;
+
+	/** Cached set of keys. */
+	protected transient volatile SORTED_SET KEY_GENERIC keys;
+#else
+	/** Cached set of entries. */
+	protected transient volatile FastEntrySet KEY_VALUE_GENERIC entries;
+
+	/** Cached set of keys. */
+	protected transient volatile SET KEY_GENERIC keys;
+#endif
+
+	/** Cached collection of values. */
+	protected transient volatile VALUE_COLLECTION VALUE_GENERIC values;
+
+#ifdef Linked
+	/** The index of the first entry in iteration order. It is valid iff {@link #size} is nonzero; otherwise, it contains -1. */
+	protected transient int first = -1;
+	/** The index of the last entry in iteration order. It is valid iff {@link #size} is nonzero; otherwise, it contains -1. */
+	protected transient int last = -1;
+	/** For each entry, the next and the previous entry in iteration order,
+     * stored as <code>((prev & 0xFFFFFFFFL) << 32) | (next & 0xFFFFFFFFL)</code>.
+     * The first entry contains predecessor -1, and the last entry 
+     * contains successor -1. */
+     protected transient long link[];
+     
+     /* Macros for transforming the bi-directional long link. Return values are 32-bit int indexes.
+      * SET_UPPER and SET_LOWER do a masked assignment as described at
+      * http://www-graphics.stanford.edu/~seander/bithacks.html#MaskedMerge
+      */
+#endif
+
+#ifdef Custom
+	/** The hash strategy of this custom map. */
+	protected STRATEGY KEY_GENERIC strategy;
+#endif
+
+#ifdef Custom
+	/** Creates a new hash map.
+	 *
+	 * <p>The actual table size will be the least power of two greater than <code>expected</code>/<code>f</code>.
+	 *
+	 * @param expected the expected number of elements in the hash map.
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 */
+	@SuppressWarnings("unchecked")
+	public OPEN_HASH_MAP( final int expected, final float f, final STRATEGY KEY_GENERIC strategy ) {
+		this.strategy = strategy;
+#else
+	/** Creates a new hash map.
+	 *
+	 * <p>The actual table size will be the least power of two greater than <code>expected</code>/<code>f</code>.
+	 *
+	 * @param expected the expected number of elements in the hash map.
+	 * @param f the load factor.
+	 */
+	@SuppressWarnings("unchecked")
+	public OPEN_HASH_MAP( final int expected, final float f ) {
+#endif
+		if ( f <= 0 || f > 1 ) throw new IllegalArgumentException( "Load factor must be greater than 0 and smaller than or equal to 1" );
+		if ( expected < 0 ) throw new IllegalArgumentException( "The expected number of elements must be nonnegative" );
+
+		this.f = f;
+
+		n = arraySize( expected, f );
+		mask = n - 1;
+		maxFill = maxFill( n, f );
+		key = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ n ];
+		value = VALUE_GENERIC_ARRAY_CAST new VALUE_TYPE[ n ];
+		used = new boolean[ n ];
+#ifdef Linked
+		link = new long[ n ];
+#endif
+	}
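+
+	/* Illustrative note (not part of the generated class): a minimal sketch of the sizing rule
+	 * above, assuming the generated Long2IntOpenHashMap variant. With the default load factor
+	 * (0.75), an expected size of 100 gives 100/0.75, that is, about 133.3, so the table is
+	 * allocated with the least power of two above that, 256, and the first 100 insertions never
+	 * trigger a rehash.
+	 *
+	 *   Long2IntOpenHashMap m = new Long2IntOpenHashMap( 100, 0.75f );
+	 */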
+	 
+	 
+#ifdef Custom
+	/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
+	 *
+	 * @param expected the expected number of elements in the hash map.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_MAP( final int expected, final STRATEGY KEY_GENERIC strategy ) {
+		this( expected, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
+	 *
+	 * @param expected the expected number of elements in the hash map.
+	 */
+	 
+	public OPEN_HASH_MAP( final int expected ) {
+		this( expected, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+
+#ifdef Custom
+	/** Creates a new hash map with initial expected {@link Hash#DEFAULT_INITIAL_SIZE} entries
+	 * and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_MAP( final STRATEGY KEY_GENERIC strategy ) {
+		this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash map with initial expected {@link Hash#DEFAULT_INITIAL_SIZE} entries
+	 * and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
+	 */
+	 
+	public OPEN_HASH_MAP() {
+		this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+
+#ifdef Custom
+	/** Creates a new hash map copying a given one.
+	 *
+	 * @param m a {@link Map} to be copied into the new hash map. 
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_MAP( final Map<? extends KEY_GENERIC_CLASS, ? extends VALUE_GENERIC_CLASS> m, final float f, final STRATEGY KEY_GENERIC strategy ) {
+		this( m.size(), f, strategy );
+		putAll( m );
+	}
+#else
+	/** Creates a new hash map copying a given one.
+	 *
+	 * @param m a {@link Map} to be copied into the new hash map. 
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_HASH_MAP( final Map<? extends KEY_GENERIC_CLASS, ? extends VALUE_GENERIC_CLASS> m, final float f ) {
+		this( m.size(), f );
+		putAll( m );
+	}
+#endif
+
+#ifdef Custom
+	/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given one.
+	 *
+	 * @param m a {@link Map} to be copied into the new hash map. 
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_MAP( final Map<? extends KEY_GENERIC_CLASS, ? extends VALUE_GENERIC_CLASS> m, final STRATEGY KEY_GENERIC strategy ) {
+		this( m, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given one.
+	 *
+	 * @param m a {@link Map} to be copied into the new hash map. 
+	 */
+	 
+	public OPEN_HASH_MAP( final Map<? extends KEY_GENERIC_CLASS, ? extends VALUE_GENERIC_CLASS> m ) {
+		this( m, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+#ifdef Custom
+	/** Creates a new hash map copying a given type-specific one.
+	 *
+	 * @param m a type-specific map to be copied into the new hash map. 
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_MAP( final MAP KEY_VALUE_GENERIC m, final float f, final STRATEGY KEY_GENERIC strategy ) {
+		this( m.size(), f, strategy );
+		putAll( m );
+	}
+#else
+	/** Creates a new hash map copying a given type-specific one.
+	 *
+	 * @param m a type-specific map to be copied into the new hash map. 
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_HASH_MAP( final MAP KEY_VALUE_GENERIC m, final float f ) {
+		this( m.size(), f );
+		putAll( m );
+	}
+#endif
+
+#ifdef Custom
+	/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given type-specific one.
+	 *
+	 * @param m a type-specific map to be copied into the new hash map. 
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_MAP( final MAP KEY_VALUE_GENERIC m, final STRATEGY KEY_GENERIC strategy ) {
+		this( m, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given type-specific one.
+	 *
+	 * @param m a type-specific map to be copied into the new hash map. 
+	 */
+	 
+	public OPEN_HASH_MAP( final MAP KEY_VALUE_GENERIC m ) {
+		this( m, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+
+#ifdef Custom
+	/** Creates a new hash map using the elements of two parallel arrays.
+	 *
+	 * @param k the array of keys of the new hash map.
+	 * @param v the array of corresponding values in the new hash map.
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 * @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths.
+	 */
+	 
+	public OPEN_HASH_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE v[], final float f, final STRATEGY KEY_GENERIC strategy ) {
+		this( k.length, f, strategy );
+		if ( k.length != v.length ) throw new IllegalArgumentException( "The key array and the value array have different lengths (" + k.length + " and " + v.length + ")" );
+		for( int i = 0; i < k.length; i++ ) this.put( k[ i ], v[ i ] );
+	}
+#else
+	/** Creates a new hash map using the elements of two parallel arrays.
+	 *
+	 * @param k the array of keys of the new hash map.
+	 * @param v the array of corresponding values in the new hash map.
+	 * @param f the load factor.
+	 * @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths.
+	 */
+	 
+	public OPEN_HASH_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE v[], final float f ) {
+		this( k.length, f );
+		if ( k.length != v.length ) throw new IllegalArgumentException( "The key array and the value array have different lengths (" + k.length + " and " + v.length + ")" );
+		for( int i = 0; i < k.length; i++ ) this.put( k[ i ], v[ i ] );
+	}
+#endif
+
+#ifdef Custom
+	/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using the elements of two parallel arrays.
+	 *
+	 * @param k the array of keys of the new hash map.
+	 * @param v the array of corresponding values in the new hash map.
+	 * @param strategy the strategy.
+	 * @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths.
+	 */
+	 
+	public OPEN_HASH_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE v[], final STRATEGY KEY_GENERIC strategy ) {
+		this( k, v, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using the elements of two parallel arrays.
+	 *
+	 * @param k the array of keys of the new hash map.
+	 * @param v the array of corresponding values in the new hash map.
+	 * @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths.
+	 */
+	 
+	public OPEN_HASH_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE v[] ) {
+		this( k, v, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+
+
+#ifdef Custom
+	/** Returns the hashing strategy.
+	 *
+	 * @return the hashing strategy of this custom hash map.
+	 */
+
+	public STRATEGY KEY_GENERIC strategy() {
+		return strategy;
+	}
+#endif
+
+	/*
+	 * The following methods implement some basic building blocks used by
+	 * all accessors. They are (and should be maintained) identical to those used in OpenHashSet.drv.
+	 */
+
+	public VALUE_GENERIC_TYPE put(final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v) {
+		// The starting point.
+		int pos = KEY2INTHASH( k ) & mask;
+
+		// There's always an unused entry.
+		while( used[ pos ] ) {
+			if ( KEY_EQUALS( key[ pos ], k ) )  {
+				final VALUE_GENERIC_TYPE oldValue = value[ pos ];
+				value[ pos ] = v;
+				return oldValue;
+			}
+			pos = ( pos + 1 ) & mask;
+		}
+		  
+		used[ pos ] = true;
+		key[ pos ] = k;
+		value[ pos ] = v;
+		
+#ifdef Linked
+		if ( size == 0 ) {
+			first = last = pos;
+			// Special case of SET_UPPER_LOWER( link[ pos ], -1, -1 );
+			link[ pos ] = -1L;
+		}
+		else {
+			SET_NEXT( link[ last ], pos );
+			SET_UPPER_LOWER(  link[ pos ], last, -1  );
+			last = pos;
+		}
+#endif
+
+		if ( ++size >= maxFill ) rehash( arraySize( size + 1, f ) );
+		if ( ASSERTS ) checkTable();
+		return defRetValue;
+	}
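+
+	/* Illustrative usage (not part of the generated class): a minimal sketch of the put()/get()
+	 * contract above, assuming the generated Long2IntOpenHashMap variant. put() returns the
+	 * previous value, or the default return value when the key was absent.
+	 *
+	 *   Long2IntOpenHashMap m = new Long2IntOpenHashMap();
+	 *   m.defaultReturnValue( -1 );
+	 *   m.put( 42L, 7 );   // returns -1: the key was not present
+	 *   m.put( 42L, 8 );   // returns 7, the previous value
+	 *   m.get( 42L );      // returns 8
+	 *   m.get( 13L );      // returns -1: absent keys map to the default return value
+	 */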
+
+
+
+#if #values(primitive) || #keys(primitive)
+
+	public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS ok, final VALUE_GENERIC_CLASS ov ) {
+		final VALUE_GENERIC_TYPE v = VALUE_CLASS2TYPE( ov );
+		final KEY_GENERIC_TYPE k = KEY_CLASS2TYPE( ok );
+
+		// The starting point.
+		int pos = KEY2INTHASH( k ) & mask;
+
+		// There's always an unused entry.
+		while( used[ pos ] ) {
+			if ( KEY_EQUALS( key[ pos ], k ) )  {
+				final VALUE_GENERIC_CLASS oldValue = VALUE2OBJ( value[ pos ] );
+				value[ pos ] = v;
+				return oldValue;		
+			}
+			pos = ( pos + 1 ) & mask;
+		}
+		  
+		used[ pos ] = true;
+		key[ pos ] = k;
+		value[ pos ] = v;
+		
+#ifdef Linked
+		if ( size == 0 ) {
+			first = last = pos;
+			// Special case of SET_UPPER_LOWER( link[ pos ], -1, -1 );
+			link[ pos ] = -1L;
+		}
+		else {
+			SET_NEXT( link[ last ], pos );
+			SET_UPPER_LOWER( link[ pos ], last, -1 );
+			last = pos;
+		}
+#endif
+
+		if ( ++size >= maxFill ) rehash( arraySize( size + 1, f ) );
+		if ( ASSERTS ) checkTable();
+		return OBJECT_DEFAULT_RETURN_VALUE;
+	}
+	 
+
+#endif
+
+
+#if #valueclass(Byte) || #valueclass(Short) || #valueclass(Char) || #valueclass(Integer) || #valueclass(Long) || #valueclass(Float) || #valueclass(Double)
+
+	/** Adds an increment to the value currently associated with a key.
+	 *
+	 * @param k the key.
+	 * @param incr the increment.
+	 * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
+	 * @deprecated use <code>addTo()</code> instead; having the same name as a {@link java.util.Set} method turned out to be a recipe for disaster.
+	 */
+	@Deprecated
+	public VALUE_GENERIC_TYPE add(final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE incr) {
+		return addTo( k, incr );
+	}
+	
+	/** Adds an increment to the value currently associated with a key.
+	 *
+	 * <P>Note that this method respects the {@linkplain #defaultReturnValue() default return value} semantics: when
+	 * called with a key that does not currently appear in the map, the key
+	 * will be associated with the default return value plus
+	 * the given increment.
+	 *
+	 * @param k the key.
+	 * @param incr the increment.
+	 * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
+	 */
+	public VALUE_GENERIC_TYPE addTo(final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE incr) {
+		// The starting point.
+		int pos = KEY2INTHASH( k ) & mask;
+
+		// There's always an unused entry.
+		while( used[ pos ] ) {
+			if ( KEY_EQUALS( key[ pos ], k ) )  {
+				final VALUE_GENERIC_TYPE oldValue = value[ pos ];
+				value[ pos ] += incr;
+				return oldValue;
+			}
+			pos = ( pos + 1 ) & mask;
+		}
+		  
+		used[ pos ] = true;
+		key[ pos ] = k;
+#if #valueclass(Byte) || #valueclass(Short) || #valueclass(Char)		
+		value[ pos ] = (VALUE_TYPE)(defRetValue + incr);
+#else
+		value[ pos ] = defRetValue + incr;
+#endif
+		
+#ifdef Linked
+		if ( size == 0 ) {
+			first = last = pos;
+			// Special case of SET_UPPER_LOWER( link[ pos ], -1, -1 );
+			link[ pos ] = -1L;
+		}
+		else {
+			SET_NEXT( link[ last ], pos );
+			SET_UPPER_LOWER( link[ pos ], last, -1 );
+			last = pos;
+		}
+#endif
+
+		if ( ++size >= maxFill ) rehash( arraySize( size + 1, f ) );
+		if ( ASSERTS ) checkTable();
+		return defRetValue;
+	}
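+
+	/* Illustrative usage (not part of the generated class): a minimal sketch of addTo() as a
+	 * counter, assuming the generated Long2IntOpenHashMap variant. Keys that are not yet present
+	 * start from the default return value, so the first addTo() stores defaultReturnValue() + 1.
+	 *
+	 *   Long2IntOpenHashMap counts = new Long2IntOpenHashMap();
+	 *   counts.defaultReturnValue( 0 );   // unseen keys count as zero
+	 *   for( long id : new long[] { 1, 2, 1 } ) counts.addTo( id, 1 );
+	 *   // counts.get( 1 ) == 2, counts.get( 2 ) == 1
+	 */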
+
+#endif
+
+	/** Shifts left entries with the specified hash code, starting at the specified position,
+	 * and empties the resulting free entry.
+	 *
+	 * @param pos a starting position.
+	 * @return the position cleared by the shifting process.
+	 */
+	protected final int shiftKeys( int pos ) {
+		// Shift entries with the same hash.
+		int last, slot;
+
+		for(;;) {
+			pos = ( ( last = pos ) + 1 ) & mask;
+			
+			while( used[ pos ] ) {
+				slot = KEY2INTHASH( key[ pos ] ) & mask;
+				if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
+				pos = ( pos + 1 ) & mask;
+			}
+
+			if ( ! used[ pos ] ) break;
+
+			key[ last ] = key[ pos ];			
+			value[ last ] = value[ pos ];			
+
+#ifdef Linked
+			fixPointers( pos, last );
+#endif
+		}
+
+		used[ last ] = false;
+#if #keys(reference)
+		key[ last ] = null;
+#endif
+#if #values(reference)
+		value[ last ] = null;
+#endif
+		return last;
+	}
+
+
+
+	@SuppressWarnings("unchecked")
+	public VALUE_GENERIC_TYPE REMOVE_VALUE( final KEY_TYPE k ) {
+		// The starting point.
+		int pos = KEY2INTHASH( k ) & mask;
+
+		// There's always an unused entry.
+		while( used[ pos ] ) {
+			if ( KEY_EQUALS( key[ pos ], k ) ) {
+				size--;
+#ifdef Linked
+				fixPointers( pos );
+#endif
+				final VALUE_GENERIC_TYPE v = value[ pos ];
+				shiftKeys( pos );
+				return v;
+			}	
+			pos = ( pos + 1 ) & mask;
+		}
+
+		return defRetValue;
+	}
+
+
+#if #keys(primitive) || #values(primitive)
+	@SuppressWarnings("unchecked")
+	public VALUE_GENERIC_CLASS remove( final Object ok ) {
+		final KEY_GENERIC_TYPE k = KEY_GENERIC_CAST KEY_OBJ2TYPE( ok );
+
+		// The starting point.
+		int pos = KEY2INTHASH( k ) & mask;
+
+		// There's always an unused entry.
+		while( used[ pos ] ) {
+			if ( KEY_EQUALS( key[ pos ], k ) ) {
+				size--;
+#ifdef Linked
+				fixPointers( pos );
+#endif
+
+				final VALUE_GENERIC_TYPE v = value[ pos ];
+				shiftKeys( pos );
+				return VALUE2OBJ( v );
+			}
+			pos = ( pos + 1 ) & mask;
+		}
+		
+		return OBJECT_DEFAULT_RETURN_VALUE;
+	}
+#endif
+
+
+#ifdef Linked
+	
+	/** Removes the mapping associated with the first key in iteration order.
+	 * @return the value previously associated with the first key in iteration order.
+	 * @throws NoSuchElementException if this map is empty.
+	 */
+	public VALUE_GENERIC_TYPE REMOVE_FIRST_VALUE() {
+		if ( size == 0 ) throw new NoSuchElementException();
+		--size;
+		final int pos = first;
+		// Abbreviated version of fixPointers(pos)
+		first = GET_NEXT(link[ pos ]);
+		if ( 0 <= first ) {
+			// Special case of SET_PREV( link[ first ], -1 )
+			link[ first ] |= (-1 & 0xFFFFFFFFL) << 32;
+		}
+		final VALUE_GENERIC_TYPE v = value[ pos ];
+		shiftKeys( pos );	
+		return v;
+	}
+
+	/** Removes the mapping associated with the last key in iteration order.
+	 * @return the value previously associated with the last key in iteration order.
+	 * @throws NoSuchElementException if this map is empty.
+	 */
+	public VALUE_GENERIC_TYPE REMOVE_LAST_VALUE() {
+		if ( size == 0 ) throw new NoSuchElementException();
+		--size;
+		final int pos = last;
+		// Abbreviated version of fixPointers(pos)
+		last = GET_PREV(link[ pos ]);
+		if ( 0 <= last ) {
+			// Special case of SET_NEXT( link[ last ], -1 )
+			link[ last ] |= -1 & 0xFFFFFFFFL;
+		}
+		final VALUE_GENERIC_TYPE v = value[ pos ];
+		shiftKeys( pos );
+		return v;
+	}
+
+	private void moveIndexToFirst( final int i ) {
+		if ( size == 1 || first == i ) return;
+		if ( last == i ) {
+			last = GET_PREV(link[ i ]);
+			// Special case of SET_NEXT( link[ last ], -1 );
+			link[ last ] |= -1 & 0xFFFFFFFFL;
+		}
+		else {
+			final long linki = link[ i ];
+			final int prev = GET_PREV(linki);
+			final int next = GET_NEXT(linki);
+			COPY_NEXT(link[ prev ], linki);
+			COPY_PREV(link[ next ], linki);
+		}
+		SET_PREV( link[ first ], i );
+		SET_UPPER_LOWER( link[ i ], -1, first );
+		first = i;
+	}
+
+	private void moveIndexToLast( final int i ) {
+		if ( size == 1 ||  last == i ) return;
+		if ( first == i ) {
+			first = GET_NEXT(link[ i ]);
+			// Special case of SET_PREV( link[ first ], -1 );
+			link[ first ] |= (-1 & 0xFFFFFFFFL) << 32;
+		}
+		else {
+			final long linki = link[ i ];
+			final int prev = GET_PREV(linki);
+			final int next = GET_NEXT(linki);
+			COPY_NEXT(link[ prev ], linki);
+			COPY_PREV(link[ next ], linki);
+		}
+		SET_NEXT( link[ last ], i );
+		SET_UPPER_LOWER( link[ i ], last, -1 );
+		last = i;
+	}
+
+	/** Returns the value to which the given key is mapped; if the key is present, it is moved to the first position of the iteration order.
+	 *
+	 * @param k the key.
+	 * @return the corresponding value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
+	 */
+	public VALUE_GENERIC_TYPE getAndMoveToFirst( final KEY_GENERIC_TYPE k ) {
+		final KEY_GENERIC_TYPE key[] = this.key;
+		final boolean used[] = this.used;
+		final int mask = this.mask;
+
+		// The starting point.
+		int pos = KEY2INTHASH( k ) & mask;
+
+		// There's always an unused entry.
+		while( used[ pos ] ) {
+			if( KEY_EQUALS( k, key[ pos ] ) ) {
+				moveIndexToFirst( pos );
+				return value[ pos ];
+			}
+			pos = ( pos + 1 ) & mask;
+		}
+
+		return defRetValue;
+	}
+	
+	/** Returns the value to which the given key is mapped; if the key is present, it is moved to the last position of the iteration order.
+	 *
+	 * @param k the key.
+	 * @return the corresponding value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
+	 */
+	public VALUE_GENERIC_TYPE getAndMoveToLast( final KEY_GENERIC_TYPE k ) {
+		final KEY_GENERIC_TYPE key[] = this.key;
+		final boolean used[] = this.used;
+		final int mask = this.mask;
+
+		// The starting point.
+		int pos = KEY2INTHASH( k ) & mask;
+
+		// There's always an unused entry.
+		while( used[ pos ] ) {
+			if( KEY_EQUALS( k, key[ pos ] ) ) {
+				moveIndexToLast( pos );
+				return value[ pos ];
+			}
+			pos = ( pos + 1 ) & mask;
+		}
+
+		return defRetValue;
+	}
+	
+	/** Adds a pair to the map; if the key is already present, it is moved to the first position of the iteration order.
+	 *
+	 * @param k the key.
+	 * @param v the value.
+	 * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
+	 */
+	public VALUE_GENERIC_TYPE putAndMoveToFirst( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) {
+		final KEY_GENERIC_TYPE key[] = this.key;
+		final boolean used[] = this.used;
+		final int mask = this.mask;
+
+		// The starting point.
+		int pos = KEY2INTHASH( k ) & mask;
+
+		// There's always an unused entry.
+		while( used[ pos ] ) {
+			if ( KEY_EQUALS( k, key[ pos ] ) ) {
+				final VALUE_GENERIC_TYPE oldValue = value[ pos ];
+				value[ pos ] = v;
+				moveIndexToFirst( pos );
+				return oldValue;
+			}
+			
+			pos = ( pos + 1 ) & mask;
+		}
+		  
+		used[ pos ] = true;
+		key[ pos ] = k;
+		value[ pos ] = v;
+		
+		if ( size == 0 ) {
+			first = last = pos;
+			// Special case of SET_UPPER_LOWER( link[ pos ], -1, -1 );
+			link[ pos ] = -1L;
+		}
+		else {
+			SET_PREV( link[ first ], pos );
+			SET_UPPER_LOWER( link[ pos ], -1, first );
+			first = pos;
+		}
+
+		if ( ++size >= maxFill ) rehash( arraySize( size, f ) );
+		if ( ASSERTS ) checkTable();
+		return defRetValue;
+	}
+
+	/** Adds a pair to the map; if the key is already present, it is moved to the last position of the iteration order.
+	 *
+	 * @param k the key.
+	 * @param v the value.
+	 * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
+	 */
+	public VALUE_GENERIC_TYPE putAndMoveToLast( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) {
+		final KEY_GENERIC_TYPE key[] = this.key;
+		final boolean used[] = this.used;
+		final int mask = this.mask;
+
+		// The starting point.
+		int pos = KEY2INTHASH( k ) & mask;
+
+		// There's always an unused entry.
+		while( used[ pos ] ) {
+			if ( KEY_EQUALS( k, key[ pos ] ) ) {
+				final VALUE_GENERIC_TYPE oldValue = value[ pos ];
+				value[ pos ] = v;
+				moveIndexToLast( pos );
+				return oldValue;
+			}
+			
+			pos = ( pos + 1 ) & mask;
+		}
+		  
+		used[ pos ] = true;
+		key[ pos ] = k;
+		value[ pos ] = v;
+		
+		if ( size == 0 ) {
+			first = last = pos;
+			// Special case of SET_UPPER_LOWER( link[ pos ], -1, -1 );
+			link[ pos ] = -1L;
+		}
+		else {
+			SET_NEXT( link[ last ], pos );
+			SET_UPPER_LOWER( link[ pos ], last, -1 );
+			last = pos;
+		}
+
+		if ( ++size >= maxFill ) rehash( arraySize( size, f ) );
+		if ( ASSERTS ) checkTable();
+		return defRetValue;
+	}
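+
+	/* Illustrative usage (not part of the generated class): a minimal sketch of an LRU-style cache
+	 * built on the move-to-last methods above, assuming the generated Long2ObjectLinkedOpenHashMap
+	 * variant with its default return value left at null; key, compute() and the capacity bound
+	 * are placeholders supplied by the caller.
+	 *
+	 *   Long2ObjectLinkedOpenHashMap<String> cache = new Long2ObjectLinkedOpenHashMap<String>();
+	 *
+	 *   String cached = cache.getAndMoveToLast( key );       // hit: key becomes the most recent
+	 *   if ( cached == null ) {
+	 *       cache.putAndMoveToLast( key, compute( key ) );   // miss: insert as the most recent
+	 *       if ( cache.size() > capacity ) cache.remove( cache.firstLongKey() );   // evict LRU
+	 *   }
+	 */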
+
+#endif
+
+#if #keys(primitive)
+
+	public VALUE_GENERIC_CLASS get( final KEY_CLASS ok ) {
+		final KEY_GENERIC_TYPE k = KEY_CLASS2TYPE( ok );
+
+		// The starting point.
+		int pos = KEY2INTHASH( KEY_GENERIC_CAST k ) & mask;
+
+		// There's always an unused entry.
+		while( used[ pos ] ) {
+			if ( KEY_EQUALS( key[ pos ], KEY_GENERIC_CAST k ) ) return VALUE2OBJ( value[ pos ] );
+			pos = ( pos + 1 ) & mask;
+		}
+
+		return OBJECT_DEFAULT_RETURN_VALUE;
+	}
+
+#endif
+
+	@SuppressWarnings("unchecked")
+	public VALUE_GENERIC_TYPE GET_VALUE( final KEY_TYPE k ) {
+
+		// The starting point.
+		int pos = KEY2INTHASH( k ) & mask;
+
+		// There's always an unused entry.
+		while( used[ pos ] ) {
+			if ( KEY_EQUALS( key[ pos ], k ) ) return value[ pos ];
+			pos = ( pos + 1 ) & mask;
+		}
+
+		return defRetValue;
+	}
+
+
+
+	@SuppressWarnings("unchecked")
+	public boolean containsKey( final KEY_TYPE k ) {
+
+		// The starting point.
+		int pos = KEY2INTHASH( k ) & mask;
+
+		// There's always an unused entry.
+		while( used[ pos ] ) {
+			if ( KEY_EQUALS( key[ pos ], k ) ) return true;
+			pos = ( pos + 1 ) & mask;
+		}
+		return false;
+	}
+
+
+	public boolean containsValue( final VALUE_TYPE v ) {
+		final VALUE_GENERIC_TYPE value[] = this.value;
+		final boolean used[] = this.used;
+
+		for( int i = n; i-- != 0; ) if ( used[ i ] && VALUE_EQUALS( value[ i ], v ) ) return true;
+		return false;
+	}
+
+	/* Removes all elements from this map.
+	 *
+	 * <P>To increase object reuse, this method does not change the table size.
+	 * If you want to reduce the table size, you must use {@link #trim()}.
+	 *
+	 */
+	public void clear() {
+		if ( size == 0 ) return;
+		size = 0;
+		BooleanArrays.fill( used, false );
+
+		// We null all object entries so that the garbage collector can do its work.
+#if #keys(reference)
+		ObjectArrays.fill( key, null );
+#endif
+#if #values(reference)
+		ObjectArrays.fill( value, null );
+#endif
+
+#ifdef Linked
+		first = last = -1;
+#endif
+	}
+
+	public int size() {
+		return size;
+	}
+
+	public boolean isEmpty() {
+		return size == 0;
+	}
+
+
+	/** A no-op for backward compatibility.
+	 * 
+	 * @param growthFactor unused.
+	 * @deprecated Since <code>fastutil</code> 6.1.0, hash tables are doubled when they are too full.
+	 */
+	@Deprecated
+	public void growthFactor( int growthFactor ) {}
+
+
+	/** Gets the growth factor (2).
+	 *
+	 * @return the growth factor of this map, which is fixed (2).
+	 * @see #growthFactor(int)
+	 * @deprecated Since <code>fastutil</code> 6.1.0, hash tables are doubled when they are too full.
+	 */
+	@Deprecated
+	public int growthFactor() {
+		return 16;
+	}
+
+
+	/** The entry class for a hash map does not record the key and the value, but
+	 * rather the position in the hash table of the corresponding entry. This
+	 * is necessary so that calls to {@link java.util.Map.Entry#setValue(Object)} are reflected in
+	 * the map. */
+
+	private final class MapEntry implements MAP.Entry KEY_VALUE_GENERIC, Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> {
+		// The table index this entry refers to, or -1 if this entry has been deleted.
+		private int index;
+		
+		MapEntry( final int index ) {
+			this.index = index;
+		}
+		
+		public KEY_GENERIC_CLASS getKey() {
+			return KEY2OBJ( key[ index ] );
+		}
+		  
+#if #keys(primitive)
+		public KEY_TYPE ENTRY_GET_KEY() {
+	   		return key[ index ];
+		}
+#endif
+
+		public VALUE_GENERIC_CLASS getValue() {
+			return VALUE2OBJ( value[ index ] );
+		}
+		  
+#if #values(primitive)
+		public VALUE_GENERIC_TYPE ENTRY_GET_VALUE() {
+			return value[ index ];
+		}
+#endif
+
+		public VALUE_GENERIC_TYPE setValue( final VALUE_GENERIC_TYPE v ) {
+			final VALUE_GENERIC_TYPE oldValue = value[ index ];
+			value[ index ] = v;
+			return oldValue;
+		}
+		  
+#if #values(primitive)
+		  
+		public VALUE_GENERIC_CLASS setValue( final VALUE_GENERIC_CLASS v ) {
+			return VALUE2OBJ( setValue( VALUE_CLASS2TYPE( v ) ) );
+		}
+
+#endif
+
+		@SuppressWarnings("unchecked")
+		public boolean equals( final Object o ) {
+			if (!(o instanceof Map.Entry)) return false;
+			Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> e = (Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>)o;
+   				
+			return KEY_EQUALS( key[ index ], KEY_CLASS2TYPE( e.getKey() ) ) && VALUE_EQUALS( value[ index ], VALUE_CLASS2TYPE( e.getValue() ) );
+		}
+		  
+		public int hashCode() {
+			return KEY2JAVAHASH( key[ index ] ) ^ VALUE2JAVAHASH( value[ index ] );
+		}
+		 
+			  
+		public String toString() {
+			return key[ index ] + "=>" + value[ index ];
+		}
+	}
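+
+	/* Illustrative sketch, assuming the generated Int2IntOpenHashMap m: since an entry
+	 * records the table position rather than a copy of the pair, setValue() writes
+	 * straight through to the map.
+	 *
+	 *   for( Int2IntMap.Entry e : m.int2IntEntrySet() )
+	 *       e.setValue( e.getIntValue() + 1 );   // increments every value in place
+	 */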
+
+
+#ifdef Linked
+
+	/** Modifies the {@link #link} vector so that the given entry is removed.
+	 *
+	 * <P>This method completes in constant time, as the {@link #link} vector
+	 * stores both the predecessor and the successor of each entry.
+	 *
+	 * @param i the index of an entry. 
+	 */
+	protected void fixPointers( final int i ) {
+		if ( size == 0 ) {
+			first = last = -1;
+			return;
+		}
+		if ( first == i ) {
+			first = GET_NEXT(link[ i ]);
+			if (0 <= first) {
+				// Special case of SET_PREV( link[ first ], -1 )
+				link[ first ] |= (-1 & 0xFFFFFFFFL) << 32;
+			}
+			return;
+		}
+		if ( last == i ) {
+			last = GET_PREV(link[ i ]);
+			if (0 <= last) {
+				// Special case of SET_NEXT( link[ last ], -1 )
+				link[ last ] |= -1 & 0xFFFFFFFFL;
+			}
+			return;
+		}
+		final long linki = link[ i ];
+		final int prev = GET_PREV(linki);
+		final int next = GET_NEXT(linki);
+		COPY_NEXT(link[ prev ], linki);
+		COPY_PREV(link[ next ], linki);
+	}
+
+
+	/** Modifies the {@link #link} vector for a shift from s to d.
+	 *
+	 * <P>This method completes in constant time, as the {@link #link} vector
+	 * stores both the predecessor and the successor of each entry.
+	 *
+	 * @param s the source position.
+	 * @param d the destination position.
+	 */
+	protected void fixPointers( int s, int d ) {
+		if ( size == 1 ) {
+			first = last = d;
+			// Special case of SET_UPPER_LOWER( link[ d ], -1, -1 )
+			link[ d ] = -1L;
+			return;
+		}
+		if ( first == s ) {
+			first = d;
+			SET_PREV( link[ GET_NEXT(link[ s ]) ], d );
+			link[ d ] = link[ s ];
+			return;
+		}
+		if ( last == s ) {
+			last = d;
+			SET_NEXT( link[ GET_PREV(link[ s ])], d );
+			link[ d ] = link[ s ];
+			return;
+		}
+		final long links = link[ s ];
+		final int prev = GET_PREV(links);
+		final int next = GET_NEXT(links);
+		SET_NEXT( link[ prev ], d );
+		SET_PREV( link[ next ], d );
+		link[ d ] = links;
+	}
+
+
+	/** Returns the first key of this map in iteration order.
+	 *
+	 * @return the first key in iteration order.
+	 */
+	public KEY_GENERIC_TYPE FIRST_KEY() {
+		if ( size == 0 ) throw new NoSuchElementException();
+		return key[ first ];
+	}
+
+
+	/** Returns the last key of this map in iteration order.
+	 *
+	 * @return the last key in iteration order.
+	 */
+	public KEY_GENERIC_TYPE LAST_KEY() {
+		if ( size == 0 ) throw new NoSuchElementException();
+		return key[ last ];
+	}
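+
+	/* Illustrative sketch, assuming the generated Int2IntLinkedOpenHashMap m:
+	 * firstIntKey()/lastIntKey() return the endpoints of the iteration order, which is
+	 * insertion order unless it has been altered by the *AndMove* methods.
+	 *
+	 *   m.put( 1, 10 ); m.put( 2, 20 ); m.put( 3, 30 );
+	 *   m.firstIntKey();              // 1
+	 *   m.putAndMoveToFirst( 3, 33 ); // 3 is moved to the front of the iteration order
+	 *   m.firstIntKey();              // 3
+	 */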
+
+	public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return null; }
+
+	public SORTED_MAP KEY_VALUE_GENERIC tailMap( KEY_GENERIC_TYPE from ) { throw new UnsupportedOperationException(); }
+	public SORTED_MAP KEY_VALUE_GENERIC headMap( KEY_GENERIC_TYPE to ) { throw new UnsupportedOperationException(); }
+	public SORTED_MAP KEY_VALUE_GENERIC subMap( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) { throw new UnsupportedOperationException(); }
+
+
+	/** A list iterator over a linked map.
+	 *
+	 * <P>This class provides a list iterator over a linked hash map. The empty constructor runs in 
+	 * constant time. The one-argument constructor needs to search for the given key, but it is 
+	 * optimized for the case of {@link java.util.SortedMap#lastKey()}, in which case it runs in constant time, too.
+	 */
+	private class MapIterator {
+		/** The entry that will be returned by the next call to {@link java.util.ListIterator#previous()} (or <code>-1</code> if no previous entry exists). */
+		int prev = -1;
+		/** The entry that will be returned by the next call to {@link java.util.ListIterator#next()} (or <code>-1</code> if no next entry exists). */
+		int next = -1;
+		/** The last entry that was returned (or -1 if we did not iterate or used {@link java.util.Iterator#remove()}). */
+		int curr = -1;
+		/** The current index (in the sense of a {@link java.util.ListIterator}). Note that this value is not meaningful when this iterator has been created using the nonempty constructor.*/
+		int index = -1;
+
+		private MapIterator() {
+			next = first;
+			index = 0;
+		}
+
+		private MapIterator( final KEY_GENERIC_TYPE from ) {
+			if ( KEY_EQUALS( key[ last ], from ) ) {
+				prev = last;
+				index = size;
+			}
+			else {
+				// The starting point.
+				int pos = KEY2INTHASH( from ) & mask;
+
+				// There's always an unused entry.
+				while( used[ pos ] ) {
+					if ( KEY_EQUALS( key[ pos ], from ) ) {
+						// Note: no valid index known.
+						next = GET_NEXT( link[ pos ] );
+						prev = pos;
+						return;
+					}
+					pos = ( pos + 1 ) & mask;
+				}
+				throw new NoSuchElementException( "The key " + from + " does not belong to this map." );
+			}
+		}
+					 
+		public boolean hasNext() { return next != -1; }
+		public boolean hasPrevious() { return prev != -1; }
+
+		private final void ensureIndexKnown() {
+			if ( index >= 0 ) return;
+			if ( prev == -1 ) {
+				index = 0;
+				return;
+			}
+			if ( next == -1 ) {
+				index = size;
+				return;
+			}
+			int pos = first;
+			index = 1;
+			while( pos != prev ) {
+				pos = GET_NEXT( link[ pos ] );
+				index++;
+			}
+		}
+
+		public int nextIndex() {
+			ensureIndexKnown();
+			return index;
+		}
+
+		public int previousIndex() {
+			ensureIndexKnown();
+			return index - 1;
+		}
+
+					 
+		public int nextEntry() {
+			if ( ! hasNext() ) throw new NoSuchElementException();
+
+			curr = next;
+			next = GET_NEXT(link[ curr ]);
+			prev = curr;
+
+			if ( index >= 0 ) index++;
+
+			return curr;
+		}
+
+		public int previousEntry() {
+			if ( ! hasPrevious() ) throw new NoSuchElementException();
+
+			curr = prev;
+			prev = GET_PREV(link[ curr ]);
+			next = curr;
+
+			if ( index >= 0 ) index--;
+
+			return curr;
+		}
+		
+		@SuppressWarnings("unchecked")
+		public void remove() {
+			ensureIndexKnown();
+			if ( curr == -1 ) throw new IllegalStateException();
+
+			if ( curr == prev ) {
+				/* If the last operation was a next(), we are removing an entry that precedes
+				   the current index, and thus we must decrement it. */
+				index--;
+				prev = GET_PREV(link[ curr ]);
+			}
+			else 
+				next = GET_NEXT(link[ curr ]);
+
+			size--;
+			/* Now we manually fix the pointers. Because of our knowledge of next
+			   and prev, this is going to be faster than calling fixPointers(). */
+			if ( prev == -1 ) first = next;
+			else 
+				SET_NEXT( link[ prev ], next );
+			if ( next == -1 ) last = prev;
+			else
+				SET_PREV( link[ next ], prev );
+
+			int last, slot, pos = curr;
+
+			// We have to horribly duplicate the shiftKeys() code because we need to update next/prev.			
+			for(;;) {
+				pos = ( ( last = pos ) + 1 ) & mask;
+				while( used[ pos ] ) {
+					slot = KEY2INTHASH( key[ pos ] ) & mask;
+					if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
+					pos = ( pos + 1 ) & mask;
+				}
+	
+				if ( ! used[ pos ] ) break;
+				key[ last ] = key[ pos ];			
+				value[ last ] = value[ pos ];			
+				if ( next == pos ) next = last;
+				if ( prev == pos ) prev = last;
+				fixPointers( pos, last );
+			}
+
+			used[ last ] = false;
+#if #keys(reference)
+			key[ last ] = null;
+#endif
+#if #values(reference)
+			value[ last ] = null;
+#endif
+			curr = -1;
+		}
+
+		public int skip( final int n ) { 
+			int i = n;
+			while( i-- != 0 && hasNext() ) nextEntry(); 
+			return n - i - 1;
+		}
+
+		public int back( final int n ) { 
+			int i = n;
+			while( i-- != 0 && hasPrevious() ) previousEntry(); 
+			return n - i - 1;
+		}
+	}
+
+	private class EntryIterator extends MapIterator implements ObjectListIterator<MAP.Entry KEY_VALUE_GENERIC> {
+		private MapEntry entry;
+	
+		public EntryIterator() {}
+
+		public EntryIterator( KEY_GENERIC_TYPE from ) {
+			super( from );
+		}
+
+		public MapEntry next() {
+			return entry = new MapEntry( nextEntry() );
+		}
+
+		public MapEntry previous() {
+			return entry = new MapEntry( previousEntry() );
+		}
+		
+		@Override
+		public void remove() {
+			super.remove();
+			entry.index = -1; // You cannot use a deleted entry.
+		}
+
+		public void set( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); }
+		public void add( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); }
+	}
+
+	private class FastEntryIterator extends MapIterator implements ObjectListIterator<MAP.Entry KEY_VALUE_GENERIC> {
+		final BasicEntry KEY_VALUE_GENERIC entry = new BasicEntry KEY_VALUE_GENERIC ( KEY_NULL, VALUE_NULL );
+	
+		public FastEntryIterator() {}
+
+		public FastEntryIterator( KEY_GENERIC_TYPE from ) {
+			super( from );
+		}
+
+		public BasicEntry KEY_VALUE_GENERIC next() {
+			final int e = nextEntry();
+			entry.key = key[ e ];
+			entry.value = value[ e ];
+			return entry;
+		}
+
+		public BasicEntry KEY_VALUE_GENERIC previous() {
+			final int e = previousEntry();
+			entry.key = key[ e ];
+			entry.value = value[ e ];
+			return entry;
+		}
+
+		public void set( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); }
+		public void add( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); }
+	}
+
+#else	 
+
+	/** An iterator over a hash map. */
+
+	private class MapIterator {
+		/** The index of the next entry to be returned, if positive or zero. If negative, the next entry to be
+			returned, if any, is that of index -pos -2 from the {@link #wrapped} list. */
+		int pos = OPEN_HASH_MAP.this.n;
+		/** The index of the last entry that has been returned. It is -1 if either
+			we did not return an entry yet, or the last returned entry has been removed. */
+		int last = -1;
+		/** A downward counter measuring how many entries must still be returned. */
+		int c = size;
+		/** A lazily allocated list containing the keys of elements that have wrapped around the table because of removals; such elements
+			would not be enumerated (other elements would usually be enumerated twice in their place). */
+		ARRAY_LIST KEY_GENERIC wrapped;
+		
+		{ 
+			final boolean used[] = OPEN_HASH_MAP.this.used;
+			if ( c != 0 ) while( ! used[ --pos ] );
+		}
+
+		public boolean hasNext() {
+			return c != 0;
+		}
+
+		public int nextEntry() {
+			if ( ! hasNext() ) throw new NoSuchElementException();
+
+			c--;
+			// We are just enumerating elements from the wrapped list.
+			if ( pos < 0 ) {
+				final KEY_GENERIC_TYPE k = wrapped.GET_KEY( - ( last = --pos ) - 2  );
+				
+				// The starting point.
+				int pos = KEY2INTHASH( k ) & mask;
+
+				// There's always an unused entry.
+				while( used[ pos ] ) {
+					if ( KEY_EQUALS( key[ pos ], k ) ) return pos;
+					pos = ( pos + 1 ) & mask;
+				}
+			}
+			
+			last = pos;
+			
+			//System.err.println( "Count: " + c );
+			if ( c != 0 ) {
+				final boolean used[] = OPEN_HASH_MAP.this.used;
+				while ( pos-- != 0 && !used[ pos ] );
+				// If pos is now negative, there are no more elements to be enumerated by scanning, but wrapped might be nonempty.
+			}
+			
+			return last;
+		}
+
+		/** Shifts left entries with the specified hash code, starting at the specified position,
+		 * and empties the resulting free entry. If any entry wraps around the table, instantiates
+		 * lazily {@link #wrapped} and stores the entry key.
+		 *
+		 * @param pos a starting position.
+		 * @return the position cleared by the shifting process.
+		 */
+		protected final int shiftKeys( int pos ) {
+			// Shift entries with the same hash.
+			int last, slot;
+	
+			for(;;) {
+				pos = ( ( last = pos ) + 1 ) & mask;
+				
+				while( used[ pos ] ) {
+					slot = KEY2INTHASH( key[ pos ] ) & mask;
+					if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
+					pos = ( pos + 1 ) & mask;
+				}
+	
+				if ( ! used[ pos ] ) break;
+				if ( pos < last ) {
+					// Wrapped entry.
+					if ( wrapped == null ) wrapped = new ARRAY_LIST KEY_GENERIC();
+					wrapped.add( key[ pos ] );
+				}
+
+				key[ last ] = key[ pos ];			
+				value[ last ] = value[ pos ];	
+			}
+	
+			used[ last ] = false;
+#if #keys(reference)
+			key[ last ] = null;
+#endif
+#if #values(reference)
+			value[ last ] = null;
+#endif
+			return last;
+		}
+	
+		@SuppressWarnings("unchecked")
+		public void remove() {
+			if ( last == -1 ) throw new IllegalStateException();
+			if ( pos < -1 ) {
+				// We're removing wrapped entries.
+#if #keys(reference)
+				OPEN_HASH_MAP.this.remove( wrapped.set( - pos - 2, null ) );
+#else
+				OPEN_HASH_MAP.this.REMOVE_VALUE( wrapped.GET_KEY( - pos - 2 ) );
+#endif
+				last = -1;
+				return;
+			}
+			size--;
+			if ( shiftKeys( last ) == pos && c > 0 ) {
+				c++;
+				nextEntry();
+			}
+			last = -1; // You can no longer remove this entry.
+			if ( ASSERTS ) checkTable();
+		}
+
+		public int skip( final int n ) { 
+			int i = n;
+			while( i-- != 0 && hasNext() ) nextEntry(); 
+			return n - i - 1;
+		}
+	}
+
+
+	private class EntryIterator extends MapIterator implements ObjectIterator<MAP.Entry KEY_VALUE_GENERIC> {
+		private MapEntry entry;
+	
+		public MAP.Entry KEY_VALUE_GENERIC next() {
+			return entry = new MapEntry( nextEntry() );
+		}
+
+		@Override
+		public void remove() {
+			super.remove();
+			entry.index = -1; // You cannot use a deleted entry.
+		}
+	}
+
+	private class FastEntryIterator extends MapIterator implements ObjectIterator<MAP.Entry KEY_VALUE_GENERIC> {
+		final BasicEntry KEY_VALUE_GENERIC entry = new BasicEntry KEY_VALUE_GENERIC ( KEY_NULL, VALUE_NULL );
+		public BasicEntry KEY_VALUE_GENERIC next() {
+			final int e = nextEntry();
+			entry.key = key[ e ];
+			entry.value = value[ e ];
+			return entry;
+		}
+	}
+
+#endif
+
+
+
+
+
+#ifdef Linked
+	private final class MapEntrySet extends AbstractObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> implements FastSortedEntrySet KEY_VALUE_GENERIC {
+
+		public ObjectBidirectionalIterator<MAP.Entry KEY_VALUE_GENERIC> iterator() {
+			return new EntryIterator();
+		}
+
+		public Comparator<? super MAP.Entry KEY_VALUE_GENERIC> comparator() { return null; }
+		public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> subSet( MAP.Entry KEY_VALUE_GENERIC fromElement, MAP.Entry KEY_VALUE_GENERIC toElement) { throw new UnsupportedOperationException(); }
+		public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> headSet( MAP.Entry KEY_VALUE_GENERIC toElement ) { throw new UnsupportedOperationException(); }
+		public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> tailSet( MAP.Entry KEY_VALUE_GENERIC fromElement ) { throw new UnsupportedOperationException(); }
+
+		public MAP.Entry KEY_VALUE_GENERIC first() { 
+			if ( size == 0 ) throw new NoSuchElementException();
+			return new MapEntry( OPEN_HASH_MAP.this.first ); 
+		}
+
+		public MAP.Entry KEY_VALUE_GENERIC last() { 
+			if ( size == 0 ) throw new NoSuchElementException();
+			return new MapEntry( OPEN_HASH_MAP.this.last ); 
+		}
+		
+#else
+	private final class MapEntrySet extends AbstractObjectSet<MAP.Entry KEY_VALUE_GENERIC> implements FastEntrySet KEY_VALUE_GENERIC {
+
+		public ObjectIterator<MAP.Entry KEY_VALUE_GENERIC> iterator() {
+			return new EntryIterator();
+		}
+
+		public ObjectIterator<MAP.Entry KEY_VALUE_GENERIC> fastIterator() {
+			return new FastEntryIterator();
+		}
+#endif					 
+					 
+		@SuppressWarnings("unchecked")
+		public boolean contains( final Object o ) {
+			if ( !( o instanceof Map.Entry ) ) return false;
+			final Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> e = (Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>)o;
+
+			final KEY_GENERIC_TYPE k = KEY_CLASS2TYPE( e.getKey() );
+
+			// The starting point.
+			int pos = KEY2INTHASH( k ) & mask;
+
+			// There's always an unused entry.
+			while( used[ pos ] ) {
+				if ( KEY_EQUALS( key[ pos ], k ) ) return VALUE_EQUALS( value[ pos ], VALUE_CLASS2TYPE( e.getValue() ) );
+				pos = ( pos + 1 ) & mask;
+			}
+			return false;
+		}
+			 
+		@SuppressWarnings("unchecked")
+		public boolean remove( final Object o ) {
+			if ( !( o instanceof Map.Entry ) ) return false;
+			final Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> e = (Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>)o;
+
+			final KEY_GENERIC_TYPE k = KEY_CLASS2TYPE( e.getKey() );
+
+			// The starting point.
+			int pos = KEY2INTHASH( k ) & mask;
+
+			// There's always an unused entry.
+			while( used[ pos ] ) {
+				if ( KEY_EQUALS( key[ pos ], k ) )  {
+					OPEN_HASH_MAP.this.remove( e.getKey() );
+					return true;
+				}
+				pos = ( pos + 1 ) & mask;
+			}
+			return false;
+		}
+			 
+		public int size() {
+			return size;
+		}
+			 
+		public void clear() {
+			OPEN_HASH_MAP.this.clear();
+		}
+
+#ifdef Linked
+		public ObjectBidirectionalIterator<MAP.Entry KEY_VALUE_GENERIC> iterator( final MAP.Entry KEY_VALUE_GENERIC from ) {
+			return new EntryIterator( KEY_CLASS2TYPE( from.getKey() ) );
+		}
+
+		public ObjectBidirectionalIterator<MAP.Entry KEY_VALUE_GENERIC> fastIterator() {
+			return new FastEntryIterator();
+		}
+				
+		public ObjectBidirectionalIterator<MAP.Entry KEY_VALUE_GENERIC> fastIterator( final MAP.Entry KEY_VALUE_GENERIC from ) {
+			return new FastEntryIterator( KEY_CLASS2TYPE( from.getKey() ) );
+		}
+				
+#endif
+	}
+
+
+#ifdef Linked
+	public FastSortedEntrySet KEY_VALUE_GENERIC ENTRYSET() {
+		if ( entries == null ) entries = new MapEntrySet();
+#else
+	public FastEntrySet KEY_VALUE_GENERIC ENTRYSET() {
+		if ( entries == null ) entries = new MapEntrySet();
+#endif
+		return entries;
+	}
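+
+	/* Illustrative sketch, assuming the generated Int2IntOpenHashMap m (sum is a local
+	 * accumulator): the entry set returned by int2IntEntrySet() also provides a
+	 * fastIterator() that reuses a single entry object, so returned entries must be
+	 * consumed on the spot and never stored.
+	 *
+	 *   int sum = 0;
+	 *   ObjectIterator<Int2IntMap.Entry> it = m.int2IntEntrySet().fastIterator();
+	 *   while( it.hasNext() ) sum += it.next().getIntValue();
+	 */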
+
+
+	/** An iterator on keys.
+	 *
+	 * <P>We simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods
+	 * (and possibly their type-specific counterparts) so that they return keys
+	 * instead of entries.
+	 */
+
+#ifdef Linked
+	private final class KeyIterator extends MapIterator implements KEY_LIST_ITERATOR KEY_GENERIC {
+		public KeyIterator( final KEY_GENERIC_TYPE k ) { super( k ); }
+		public KEY_GENERIC_TYPE PREV_KEY() { return key[ previousEntry() ]; }
+		public void set( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+		public void add( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+#if ! #keys(reference)
+		public KEY_GENERIC_CLASS previous() { return KEY2OBJ( key[ previousEntry() ] ); }
+		public void set( KEY_CLASS ok ) { throw new UnsupportedOperationException(); }
+		public void add( KEY_CLASS ok ) { throw new UnsupportedOperationException(); }
+#endif
+
+#else
+	private final class KeyIterator extends MapIterator implements KEY_ITERATOR KEY_GENERIC {
+#endif
+
+		public KeyIterator() { super(); }
+		public KEY_GENERIC_TYPE NEXT_KEY() { return key[ nextEntry() ]; }
+#if ! #keys(reference)
+		public KEY_GENERIC_CLASS next() { return KEY2OBJ( key[ nextEntry() ] ); }
+#endif
+	}
+
+
+
+#ifdef Linked
+	private final class KeySet extends ABSTRACT_SORTED_SET KEY_GENERIC {
+
+		public KEY_LIST_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) {
+			return new KeyIterator( from );
+		}
+
+		public KEY_LIST_ITERATOR KEY_GENERIC iterator() {
+			return new KeyIterator();
+		}
+#else
+	private final class KeySet extends ABSTRACT_SET KEY_GENERIC {
+
+		public KEY_ITERATOR KEY_GENERIC iterator() {
+			return new KeyIterator();
+		}
+#endif
+
+		public int size() {
+			return size;
+		}
+
+		public boolean contains( KEY_TYPE k ) {
+			return containsKey( k );
+		}
+					 
+		public boolean remove( KEY_TYPE k ) {
+			final int oldSize = size;
+			OPEN_HASH_MAP.this.remove( k );
+			return size != oldSize;
+		}
+					 
+		public void clear() {
+			OPEN_HASH_MAP.this.clear();
+		}
+
+
+#ifdef Linked
+		public KEY_GENERIC_TYPE FIRST() {
+			if ( size == 0 ) throw new NoSuchElementException();
+			return key[ first ];
+		}
+
+		public KEY_GENERIC_TYPE LAST() {
+			if ( size == 0 ) throw new NoSuchElementException();
+			return key[ last ];
+		}
+
+		public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return null; }
+
+		final public SORTED_SET KEY_GENERIC tailSet( KEY_GENERIC_TYPE from ) { throw new UnsupportedOperationException(); }
+		final public SORTED_SET KEY_GENERIC headSet( KEY_GENERIC_TYPE to ) { throw new UnsupportedOperationException(); }
+		final public SORTED_SET KEY_GENERIC subSet( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) { throw new UnsupportedOperationException(); }
+#endif
+	}
+
+
+#ifdef Linked
+	public SORTED_SET KEY_GENERIC keySet() {
+#else
+	public SET KEY_GENERIC keySet() {
+#endif
+		if ( keys == null ) keys = new KeySet();
+		return keys;
+	}
+
+
+	/** An iterator on values.
+	 *
+	 * <P>We simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods
+	 * (and possibly their type-specific counterparts) so that they return values
+	 * instead of entries.
+	 */
+
+#ifdef Linked
+	private final class ValueIterator extends MapIterator implements VALUE_LIST_ITERATOR VALUE_GENERIC {
+		public VALUE_GENERIC_TYPE PREV_VALUE() { return value[ previousEntry() ]; }
+
+#if ! #values(reference)
+		public VALUE_GENERIC_CLASS previous() { return VALUE2OBJ( value[ previousEntry() ] ); }
+		public void set( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); }
+		public void add( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); }
+#endif									
+		public void set( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); }
+		public void add( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); }
+#else
+	private final class ValueIterator extends MapIterator implements VALUE_ITERATOR VALUE_GENERIC {
+#endif
+
+		public ValueIterator() { super(); }
+		public VALUE_GENERIC_TYPE NEXT_VALUE() { return value[ nextEntry() ]; }
+#if ! #values(reference)
+		public VALUE_GENERIC_CLASS next() { return VALUE2OBJ( value[ nextEntry() ] ); }
+#endif
+	}
+
+	public VALUE_COLLECTION VALUE_GENERIC values() {
+		if ( values == null ) values = new VALUE_ABSTRACT_COLLECTION VALUE_GENERIC() {
+
+				public VALUE_ITERATOR VALUE_GENERIC iterator() {
+					return new ValueIterator();
+				}
+
+				public int size() {
+					return size;
+				}
+
+				public boolean contains( VALUE_TYPE v ) {
+					return containsValue( v );
+				}
+
+				public void clear() {
+					OPEN_HASH_MAP.this.clear();
+				}
+			};
+
+		return values;
+	}
+
+
+	/** A no-op for backward compatibility. The kind of tables implemented by
+	 * this class never needs rehashing.
+	 *
+	 * <P>If you need to reduce the table size to fit exactly
+	 * this map, use {@link #trim()}.
+	 *
+	 * @return true.
+	 * @see #trim()
+	 * @deprecated A no-op.
+	 */
+
+	@Deprecated
+	public boolean rehash() {
+		return true;
+	}
+
+
+	/** Rehashes the map, making the table as small as possible.
+	 * 
+	 * <P>This method rehashes the table to the smallest size satisfying the
+	 * load factor. It can be used when the map will not be changed anymore, so
+	 * as to optimize access speed and size.
+	 *
+	 * <P>If the table size is already the minimum possible, this method
+	 * does nothing. 
+	 *
+	 * @return true if there was enough memory to trim the map.
+	 * @see #trim(int)
+	 */
+
+	public boolean trim() {
+		final int l = arraySize( size, f );
+		if ( l >= n ) return true;
+		try {
+			rehash( l );
+		}
+		catch(OutOfMemoryError cantDoIt) { return false; }
+		return true;
+	}
+
+
+	/** Rehashes this map if the table is too large.
+	 * 
+	 * <P>Let <var>N</var> be the smallest table size that can hold
+	 * <code>max(n,{@link #size()})</code> entries, still satisfying the load factor. If the current
+	 * table size is smaller than or equal to <var>N</var>, this method does
+	 * nothing. Otherwise, it rehashes this map in a table of size
+	 * <var>N</var>.
+	 *
+	 * <P>This method is useful when reusing maps.  {@linkplain #clear() Clearing a
+	 * map} leaves the table size untouched. If you are reusing a map
+	 * many times, you can call this method with a typical
+	 * size to avoid keeping around a very large table just
+	 * because of a few large transient maps.
+	 *
+	 * @param n the threshold for the trimming.
+	 * @return true if there was enough memory to trim the map.
+	 * @see #trim()
+	 */
+
+	public boolean trim( final int n ) {
+		final int l = HashCommon.nextPowerOfTwo( (int)Math.ceil( n / f ) );
+		if ( this.n <= l ) return true;
+		try {
+			rehash( l );
+		}
+		catch( OutOfMemoryError cantDoIt ) { return false; }
+		return true;
+	}
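+
+	/* Illustrative sketch, assuming the generated Int2IntOpenHashMap m: when a map is
+	 * reused many times, clear() keeps the current table size, so a transient peak can
+	 * leave a very large table behind; trim( n ) shrinks it back to a typical size.
+	 *
+	 *   m.clear();        // removes all entries, but does not touch the table size
+	 *   m.trim( 1000 );   // rehashes into a table sized for about 1000 entries
+	 */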
+
+	/** Resizes the map.
+	 *
+	 * <P>This method implements the basic rehashing strategy, and may be
+	 * overridden by subclasses implementing different rehashing strategies (e.g.,
+	 * disk-based rehashing). However, you should not override this method
+	 * unless you understand the internal workings of this class.
+	 *
+	 * @param newN the new size
+	 */
+
+	@SuppressWarnings("unchecked")
+	protected void rehash( final int newN ) {
+#ifdef Linked
+		int i = first, prev = -1, newPrev = -1, t, pos;
+#else
+		int i = 0, pos;
+		final boolean used[] = this.used;
+#endif
+
+		KEY_GENERIC_TYPE k;
+
+		final KEY_GENERIC_TYPE key[] = this.key;
+		final VALUE_GENERIC_TYPE value[] = this.value;
+
+		final int newMask = newN - 1;
+		final KEY_GENERIC_TYPE newKey[] = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ newN ];
+		final VALUE_GENERIC_TYPE newValue[] = VALUE_GENERIC_ARRAY_CAST new VALUE_TYPE[newN];
+		final boolean newUsed[] = new boolean[ newN ];
+
+#ifdef Linked
+		final long link[] = this.link;
+		final long newLink[] = new long[ newN ];
+		first = -1;
+#endif
+
+		for( int j = size; j-- != 0; ) {
+
+#ifndef Linked
+			while( ! used[ i ] ) i++;
+#endif
+
+			k = key[ i ];
+			pos = KEY2INTHASH( k ) & newMask;
+
+			while ( newUsed[ pos ] ) pos = ( pos + 1 ) & newMask;
+				
+			newUsed[ pos ] = true;
+			newKey[ pos ] = k;
+			newValue[ pos ] = value[ i ];
+
+#ifdef Linked
+			if ( prev != -1 ) {
+				SET_NEXT( newLink[ newPrev ], pos );
+				SET_PREV( newLink[ pos ], newPrev );
+				newPrev = pos;
+			}
+			else {
+				newPrev = first = pos;
+				// Special case of SET(newLink[ pos ], -1, -1);
+				newLink[ pos ] = -1L;
+			}
+
+			t = i;
+			i = GET_NEXT(link[ i ]);
+			prev = t;
+
+#else
+			i++;
+#endif
+		}
+
+		n = newN;
+		mask = newMask;
+		maxFill = maxFill( n, f );
+		this.key = newKey;
+		this.value = newValue;
+		this.used = newUsed;
+#ifdef Linked
+		this.link = newLink;
+		this.last = newPrev;
+		if ( newPrev != -1 )
+			// Special case of SET_NEXT( newLink[ newPrev ], -1 );
+			newLink[ newPrev ] |= -1 & 0xFFFFFFFFL;
+#endif
+	}
+	 
+
+	/** Returns a deep copy of this map. 
+	 *
+	 * <P>This method performs a deep copy of this hash map; the data stored in the
+	 * map, however, is not cloned. Note that this makes a difference only for object keys and values.
+	 *
+	 *  @return a deep copy of this map.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public OPEN_HASH_MAP KEY_VALUE_GENERIC clone() {
+		OPEN_HASH_MAP KEY_VALUE_GENERIC c;
+		try {
+			c = (OPEN_HASH_MAP KEY_VALUE_GENERIC)super.clone();
+		}
+		catch(CloneNotSupportedException cantHappen) {
+			throw new InternalError();
+		}
+
+		c.keys = null;
+		c.values = null;
+		c.entries = null;
+
+		c.key = key.clone();
+		c.value = value.clone();
+		c.used = used.clone();
+#ifdef Linked
+		c.link = link.clone();
+#endif
+#ifdef Custom
+		c.strategy = strategy;
+#endif
+		return c;
+	}
+
+
+	/** Returns a hash code for this map.
+	 *
+	 * This method overrides the generic method provided by the superclass. 
+	 * Since <code>equals()</code> is not overridden, it is important
+	 * that the value returned by this method is the same value as
+	 * the one returned by the overridden method.
+	 *
+	 * @return a hash code for this map.
+	 */
+
+	public int hashCode() {
+		int h = 0;
+		for( int j = size, i = 0, t = 0; j-- != 0; ) {
+			while( ! used[ i ] ) i++;
+#if #keys(reference)
+			if ( this != key[ i ] )
+#endif
+				t = KEY2JAVAHASH( key[ i ] );
+#if #values(reference)
+			if ( this != value[ i ] )
+#endif
+				t ^=  VALUE2JAVAHASH( value[ i ] );
+			h += t;
+			i++;
+		}
+		return h;
+	}
+
+
+
+	private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
+		final KEY_GENERIC_TYPE key[] = this.key;
+		final VALUE_GENERIC_TYPE value[] = this.value;
+		final MapIterator i = new MapIterator();
+
+		s.defaultWriteObject();
+
+		for( int j = size, e; j-- != 0; ) {
+			e = i.nextEntry();
+			s.WRITE_KEY( key[ e ] );
+			s.WRITE_VALUE( value[ e ] );
+		}
+	}
+
+
+
+	@SuppressWarnings("unchecked")
+	private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
+		s.defaultReadObject();
+
+		n = arraySize( size, f );
+		maxFill = maxFill( n, f );
+		mask = n - 1;
+		
+		final KEY_GENERIC_TYPE key[] = this.key = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ n ];
+		final VALUE_GENERIC_TYPE value[] = this.value = VALUE_GENERIC_ARRAY_CAST new VALUE_TYPE[ n ];
+		final boolean used[] = this.used = new boolean[ n ];
+#ifdef Linked
+		final long link[] = this.link = new long[ n ];
+		int prev = -1;
+		first = last = -1;
+#endif
+
+		KEY_GENERIC_TYPE k;
+		VALUE_GENERIC_TYPE v;
+
+		for( int i = size, pos = 0; i-- != 0; ) {
+
+			k = KEY_GENERIC_CAST s.READ_KEY();
+			v = VALUE_GENERIC_CAST s.READ_VALUE();
+
+			pos = KEY2INTHASH( k ) & mask;
+
+			while ( used[ pos ] ) pos = ( pos + 1 ) & mask;
+
+			used[ pos ] = true;
+			key[ pos ] = k;
+			value[ pos ] = v;
+			
+#ifdef Linked
+			if ( first != -1 ) {
+				SET_NEXT( link[ prev ], pos );
+				SET_PREV( link[ pos ], prev );
+				prev = pos;
+			}
+			else {
+				prev = first = pos;
+				// Special case of SET_PREV( newLink[ pos ], -1 );
+				link[ pos ] |= (-1L & 0xFFFFFFFFL) << 32;
+			}
+#endif
+		}
+
+#ifdef Linked
+		last = prev;
+		if ( prev != -1 ) 
+			// Special case of SET_NEXT( link[ prev ], -1 );
+			link[ prev ] |= -1 & 0xFFFFFFFFL; 
+#endif
+
+		if ( ASSERTS ) checkTable();
+	}
+
+
+
+#ifdef ASSERTS_CODE
+	private void checkTable() {
+		final boolean[] used = this.used;
+		assert ( n & -n ) == n : "Table length is not a power of two: " + n;
+		assert n == key.length;
+		assert n == used.length;
+		int n = used.length;
+		while( n-- != 0 ) 
+			if ( used[ n ] && ! containsKey( key[ n ] ) ) 
+				throw new AssertionError( "Hash table has key " + key[ n ] + " marked as occupied, but the key does not belong to the table" );
+
+#if #keys(primitive)
+		java.util.HashSet<KEY_GENERIC_CLASS> s = new java.util.HashSet<KEY_GENERIC_CLASS> ();
+#else
+		java.util.HashSet<Object> s = new java.util.HashSet<Object>();
+#endif
+		
+		for( int i = used.length; i-- != 0; )
+			if ( used[ i ] && ! s.add( key[ i ] ) ) throw new AssertionError( "Key " + key[ i ] + " appears twice at position " + i );
+
+#ifdef Linked
+		KEY_BIDI_ITERATOR KEY_GENERIC i = keySet().iterator();
+		KEY_GENERIC_TYPE k;
+		n = size();
+		while( n-- != 0 ) 
+			if ( ! containsKey( k = i.NEXT_KEY() ) ) 
+				throw new AssertionError( "Linked hash table forward enumerates key " + k + ", but the key does not belong to the table" );
+
+		if ( i.hasNext() ) throw new AssertionError( "Forward iterator not exhausted" );
+
+		n = size();
+		if ( n > 0 ) {
+			i = keySet().iterator( LAST_KEY() );
+			while( n-- != 0 ) 
+				if ( ! containsKey( k = i.PREV_KEY() ) ) 
+					throw new AssertionError( "Linked hash table backward enumerates key " + k + ", but the key does not belong to the table" );
+			
+			if ( i.hasPrevious() ) throw new AssertionError( "Previous iterator not exhausted" );
+		}
+#endif
+	}
+#else
+	private void checkTable() {}
+#endif
+
+
+#ifdef TEST
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif !#keyclass(Reference)
+#ifdef Custom
+		int i = r.nextInt( 3 );
+		byte a[] = new byte[ i ];
+		while( i-- != 0 ) a[ i ] = (byte)r.nextInt();
+		return a;
+#else
+		return Integer.toBinaryString( r.nextInt() );
+#endif
+#else
+		return new java.io.Serializable() {};
+#endif
+	}
+
+	private static VALUE_TYPE genValue() {
+#if #valueclass(Byte) || #valueclass(Short) || #valueclass(Character)
+		return (VALUE_TYPE)(r.nextInt());
+#elif #values(primitive)
+		return r.NEXT_VALUE();
+#elif !#valueclass(Reference)
+		return Integer.toBinaryString( r.nextInt() );
+#else
+		return new java.io.Serializable() {};
+#endif
+	}
+
+	private static final class ArrayComparator implements java.util.Comparator {
+		public int compare( Object a, Object b ) {
+			byte[] aa = (byte[])a;
+			byte[] bb = (byte[])b;
+			int length = Math.min( aa.length, bb.length );
+			for( int i = 0; i < length; i++ ) {
+				if ( aa[ i ] < bb[ i ] ) return -1;
+				if ( aa[ i ] > bb[ i ] ) return 1;
+			}
+			return aa.length == bb.length ? 0 : ( aa.length < bb.length ? -1 : 1 );
+		}
+	}
+
+	private static final class MockMap extends java.util.TreeMap {
+		private java.util.List list = new java.util.ArrayList();
+
+		public MockMap( java.util.Comparator c ) { super( c ); }
+
+		public Object put( Object k, Object v ) {
+			if ( ! containsKey( k ) ) list.add( k );
+			return super.put( k, v );
+		}
+
+		public void putAll( Map m ) {
+			java.util.Iterator i = m.entrySet().iterator();
+			while( i.hasNext() ) {
+				Map.Entry e = (Map.Entry)i.next();
+				put( e.getKey(), e.getValue() );
+			}
+		}
+
+		public Object remove( Object k ) {
+			if ( containsKey( k ) ) {
+				int i = list.size();
+				while( i-- != 0 ) if ( comparator().compare( list.get( i ), k ) == 0 ) {
+					list.remove( i );
+					break;
+				}
+			}
+			return super.remove( k );
+		}
+
+		private void justRemove( Object k ) { super.remove( k ); }
+		private java.util.Set justEntrySet() { return super.entrySet(); }
+		private java.util.Set justKeySet() { return super.keySet(); }
+
+		public java.util.Set keySet() {
+			return new java.util.AbstractSet() {
+					final java.util.Set keySet = justKeySet();
+					
+					public boolean contains( Object k ) { return keySet.contains( k ); }
+					public int size() { return keySet.size(); }
+					public java.util.Iterator iterator() {
+						return new java.util.Iterator() {
+								final java.util.Iterator iterator = list.iterator();
+								Object curr;
+								public Object next() { return curr = iterator.next(); }
+								public boolean hasNext() { return iterator.hasNext(); }
+								public void remove() { 
+									justRemove( curr );
+									iterator.remove(); 
+								}
+							};
+
+					}
+				};
+
+		}
+
+		public java.util.Set entrySet() {
+			return new java.util.AbstractSet() {
+					final java.util.Set entrySet = justEntrySet();
+					
+					public boolean contains( Object k ) { return entrySet.contains( k ); }
+					public int size() { return entrySet.size(); }
+					public java.util.Iterator iterator() {
+						return new java.util.Iterator() {
+								final java.util.Iterator iterator = list.iterator();
+								Object curr;
+								public Object next() { 
+									curr = iterator.next();
+#if #valueclass(Reference)
+#if #keyclass(Reference)
+									return new ABSTRACT_MAP.BasicEntry( (Object)curr, (Object)get(curr) ) {
+#else
+									return new ABSTRACT_MAP.BasicEntry( (KEY_CLASS)curr, (Object)get(curr) ) {
+#endif
+#else
+#if #keyclass(Reference)
+									return new ABSTRACT_MAP.BasicEntry( (Object)curr, (VALUE_CLASS)get(curr) ) {
+#else
+									return new ABSTRACT_MAP.BasicEntry( (KEY_CLASS)curr, (VALUE_CLASS)get(curr) ) {
+#endif
+#endif
+											public VALUE_TYPE setValue( VALUE_TYPE v ) {
+												return VALUE_OBJ2TYPE(put( getKey(), VALUE2OBJ(v) ));
+											}
+										}; 
+								}
+								public boolean hasNext() { return iterator.hasNext(); }
+								public void remove() { 
+									justRemove( ((Map.Entry)curr).getKey() );
+									iterator.remove(); 
+								}
+							};
+
+					}
+				};
+
+		}
+
+	}
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition fp = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, fp ).toString();
+	}
+
+
+	private static void speedTest( int n, float f, boolean comp ) {
+#ifndef Custom
+		int i, j;
+		OPEN_HASH_MAP m;
+#ifdef Linked
+		java.util.LinkedHashMap t;
+#else
+		java.util.HashMap t;
+#endif
+		KEY_TYPE k[] = new KEY_TYPE[n];
+		KEY_TYPE nk[] = new KEY_TYPE[n];
+		VALUE_TYPE v[] = new VALUE_TYPE[n];
+		long ms;
+
+		for( i = 0; i < n; i++ ) {
+			k[i] = genKey();
+			nk[i] = genKey();
+			v[i] = genValue();
+		}
+
+		double totPut = 0, totYes = 0, totNo = 0, totIter = 0, totRemYes = 0, totRemNo = 0, d;
+
+		if ( comp ) { for( j = 0; j < 20; j++ ) {
+
+#ifdef Linked
+			t = new java.util.LinkedHashMap( 16 );
+#else
+			t = new java.util.HashMap( 16 );
+#endif
+
+			/* We put pairs to t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totPut += d; 				
+			System.out.print("Put: " + format( d ) +" K/s " );
+
+			/* We check for pairs in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.containsKey( KEY2OBJ( k[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totYes += d; 				
+			System.out.print("Yes: " + format( d ) +" K/s " );
+
+			/* We check for pairs not in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.containsKey( KEY2OBJ( nk[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totNo += d; 				
+			System.out.print("No: " + format( d ) +" K/s " );
+
+			/* We iterate on t. */
+			ms = System.currentTimeMillis();
+			for( java.util.Iterator it = t.entrySet().iterator(); it.hasNext(); it.next() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIter += d; 				
+			System.out.print("Iter: " + format( d ) +" K/s " );
+				
+			/* We delete pairs not in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.remove( KEY2OBJ( nk[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemNo += d; 				
+			System.out.print("RemNo: " + format( d ) +" K/s " );
+				
+			/* We delete pairs in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.remove( KEY2OBJ( k[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemYes += d; 				
+			System.out.print("RemYes: " + format( d ) +" K/s " );
+				
+			System.out.println();
+		}
+
+		System.out.println();
+		System.out.println( "java.util Put: " + format( totPut/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s Iter: " + format( totIter/(j-3) ) + " K/s RemNo: " + format( totRemNo/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + "K/s" );
+
+		System.out.println();
+
+		totPut = totYes = totNo = totIter = totRemYes = totRemNo = 0;
+
+		}
+
+		for( j = 0; j < 20; j++ ) {
+
+			m = new OPEN_HASH_MAP( 16, f );
+
+			/* We put pairs to m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.put( k[i], v[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totPut += d; 				
+			System.out.print("Put: " + format( d ) +" K/s " );
+
+			/* We check for pairs in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.containsKey( k[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totYes += d; 				
+			System.out.print("Yes: " + format( d ) +" K/s " );
+
+			/* We check for pairs not in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.containsKey( nk[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totNo += d; 				
+			System.out.print("No: " + format( d ) +" K/s " );
+
+			/* We iterate on m. */
+			ms = System.currentTimeMillis();
+			for( java.util.Iterator it = m.entrySet().iterator(); it.hasNext(); it.next() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIter += d; 	 
+			System.out.print("Iter: " + format( d ) +" K/s " );
+
+			/* We delete pairs not in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.remove( nk[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemNo += d; 	
+			System.out.print("RemNo: " + format( d ) +" K/s " );
+
+			/* We delete pairs in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.remove( k[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemYes += d; 				
+			System.out.print("RemYes: " + format( d ) +" K/s " );	 
+
+			System.out.println();
+		}
+
+
+		System.out.println();
+		System.out.println( "fastutil  Put: " + format( totPut/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s Iter: " + format( totIter/(j-3) ) + " K/s RemNo: " + format( totRemNo/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s" );
+
+		System.out.println();
+#endif
+	}
+
+	private static boolean valEquals(Object o1, Object o2) {
+		return o1 == null ? o2 == null : o1.equals(o2);
+	}
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	protected static void test( int n, float f ) {
+#if !defined(Custom) || #keys(reference)
+
+#ifdef Custom
+		OPEN_HASH_MAP m = new OPEN_HASH_MAP(Hash.DEFAULT_INITIAL_SIZE, f, it.unimi.dsi.fastutil.bytes.ByteArrays.HASH_STRATEGY);
+#else
+		OPEN_HASH_MAP m = new OPEN_HASH_MAP(Hash.DEFAULT_INITIAL_SIZE, f);
+#endif
+
+#ifdef Linked
+#ifdef Custom
+		Map t = new MockMap( new ArrayComparator() );
+#else
+		Map t = new java.util.LinkedHashMap();
+#endif
+#else
+#ifdef Custom
+		Map t = new java.util.TreeMap(new ArrayComparator());
+#else
+		Map t = new java.util.HashMap();
+#endif
+#endif
+
+		/* First of all, we fill t with random data. */
+
+		for(int i=0; i<n;  i++ ) t.put( KEY2OBJ(genKey()), VALUE2OBJ(genValue()) );
+		  
+		/* Now we add to m the same data */
+		  
+		m.putAll(t);
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after insertion");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after insertion");
+
+		/* Now we check that m actually holds that data. */
+		  
+		for(java.util.Iterator i=t.entrySet().iterator(); i.hasNext();  ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			if (!valEquals(e.getValue(), m.get(e.getKey()))) 
+				System.out.println("Error (" + seed + "): m and t differ on an entry ("+e+") after insertion (iterating on t)");
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.entrySet().iterator(); i.hasNext();  ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			if (!valEquals(e.getValue(), t.get(e.getKey()))) 
+				System.out.println("Error (" + seed + "): m and t differ on an entry ("+e+") after insertion (iterating on m)");
+		}
+
+		/* Now we check that m actually holds the same keys. */
+		  
+		for(java.util.Iterator i=t.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			if (!m.containsKey(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+o+") after insertion (iterating on t)");
+				System.exit( 1 );
+			}
+			if (!m.keySet().contains(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+o+", in keySet()) after insertion (iterating on t)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that m actually holds the same keys, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			if (!t.containsKey(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key after insertion (iterating on m)");
+				System.exit( 1 );
+			}
+			if (!t.keySet().contains(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key (in keySet()) after insertion (iterating on m)");
+				System.exit( 1 );
+			}
+		}
+
+
+		/* Now we check that m actually holds the same values. */
+		  
+		for(java.util.Iterator i=t.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			if (!m.containsValue(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a value after insertion (iterating on t)");
+				System.exit( 1 );
+			}
+			if (!m.values().contains(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a value (in values()) after insertion (iterating on t)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that m actually holds the same values, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			if (!t.containsValue(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a value after insertion (iterating on m)");
+				System.exit( 1 );
+			}
+			if (!t.values().contains(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a value (in values()) after insertion (iterating on m)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that inquiries about random data give the same answer in m and t. For
+		   m we use the polymorphic method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+			if (m.containsKey(KEY2OBJ(T)) != t.containsKey(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence in keys between t and m (polymorphic method)");
+				System.exit( 1 );
+			}
+
+#if ( #keys(reference) ) && ! ( #values(reference) )
+			if ((m.GET_VALUE(T) != VALUE_NULL) != ((t.get(KEY2OBJ(T)) == null ? VALUE_NULL : VALUE_OBJ2TYPE(t.get(KEY2OBJ(T)))) != VALUE_NULL) || 
+				t.get(KEY2OBJ(T)) != null && 
+				! VALUE2OBJ(m.GET_VALUE(T)).equals(t.get(KEY2OBJ(T)))) 
+#else
+				if ((m.get(T) != VALUE_NULL) != ((t.get(KEY2OBJ(T)) == null ? VALUE_NULL : VALUE_OBJ2TYPE(t.get(KEY2OBJ(T)))) != VALUE_NULL) || 
+					t.get(KEY2OBJ(T)) != null && 
+					! m.get(KEY2OBJ(T)).equals(t.get(KEY2OBJ(T)))) 
+#endif
+					{
+						System.out.println("Error (" + seed + "): divergence between t and m (polymorphic method)");
+						System.exit( 1 );
+					}
+		}
+
+		/* Again, we check that inquiries about random data give the same answer in m and t, but
+		   for m we use the standard method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+			if (!valEquals(m.get(KEY2OBJ(T)), t.get(KEY2OBJ(T)))) {
+				System.out.println("Error (" + seed + "): divergence between t and m (standard method)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+			VALUE_TYPE U = genValue();
+			if (!valEquals(m.put(KEY2OBJ(T), VALUE2OBJ(U)), t.put(KEY2OBJ(T), VALUE2OBJ(U)))) {
+				System.out.println("Error (" + seed + "): divergence in put() between t and m");
+				System.exit( 1 );
+			}
+			T = genKey();
+			if (!valEquals(m.remove(KEY2OBJ(T)), t.remove(KEY2OBJ(T)))) {
+				System.out.println("Error (" + seed + "): divergence in remove() between t and m");
+				System.exit( 1 );
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after removal");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after removal");
+
+
+		/* Now we check that m actually holds the same data. */
+		  
+		for(java.util.Iterator i=t.entrySet().iterator(); i.hasNext();  ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			if (!valEquals(e.getValue(), m.get(e.getKey()))) {
+				System.out.println("Error (" + seed + "): m and t differ on an entry ("+e+") after removal (iterating on t)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.entrySet().iterator(); i.hasNext();  ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			if (!valEquals(e.getValue(), t.get(e.getKey()))) {
+				System.out.println("Error (" + seed + "): m and t differ on an entry ("+e+") after removal (iterating on m)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that m actually holds the same keys. */
+		  
+		for(java.util.Iterator i=t.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			if (!m.containsKey(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+o+") after removal (iterating on t)");
+				System.exit( 1 );
+			}
+			if (!m.keySet().contains(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+o+", in keySet()) after removal (iterating on t)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that m actually holds the same keys, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			if (!t.containsKey(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key after removal (iterating on m)");
+				System.exit( 1 );
+			}
+			if (!t.keySet().contains(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key (in keySet()) after removal (iterating on m)");
+				System.exit( 1 );
+			}
+		}
+
+
+		/* Now we check that m actually holds the same values. */
+		  
+		for(java.util.Iterator i=t.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			if (!m.containsValue(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a value after removal (iterating on t)");
+				System.exit( 1 );
+			}
+			if (!m.values().contains(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a value (in values()) after removal (iterating on t)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that m actually holds the same values, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			if (!t.containsValue(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a value after removal (iterating on m)");
+				System.exit( 1 );
+			}
+			if (!t.values().contains(o)) {
+				System.out.println("Error (" + seed + "): m and t differ on a value (in values()) after removal (iterating on m)");
+				System.exit( 1 );
+			}
+		}
+
+
+		int h = m.hashCode();
+
+
+		/* Now we save and read m. */
+
+		try {
+			java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test");
+			java.io.OutputStream os = new java.io.FileOutputStream(ff);
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os);
+				
+			oos.writeObject(m);
+			oos.close();
+				
+			java.io.InputStream is = new java.io.FileInputStream(ff);
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is);
+				
+			m = (OPEN_HASH_MAP)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch(Exception e) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+
+
+#if !#keyclass(Reference) && !#valueclass(Reference)
+		if (m.hashCode() != h) System.out.println("Error (" + seed + "): hashCode() changed after save/read");
+
+		/* Now we check that m actually holds that data. */
+		  
+		for(java.util.Iterator i=t.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			if (!valEquals(m.get(o),t.get(o))) {
+				System.out.println("Error (" + seed + "): m and t differ on an entry after save/read");
+				System.exit( 1 );
+			}
+		}
+#else
+		m.clear();
+		m.putAll( t );
+#endif
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+			VALUE_TYPE U = genValue();
+			if (!valEquals(m.put(KEY2OBJ(T), VALUE2OBJ(U)), t.put(KEY2OBJ(T), VALUE2OBJ(U)))) {
+				System.out.println("Error (" + seed + "): divergence in put() between t and m after save/read");
+				System.exit( 1 );
+			}
+			T = genKey();
+			if (!valEquals(m.remove(KEY2OBJ(T)), t.remove(KEY2OBJ(T)))) {
+				System.out.println("Error (" + seed + "): divergence in remove() between t and m after save/read");
+				System.exit( 1 );
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after post-save/read removal");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after post-save/read removal");
+
+
+
+#ifdef Linked
+
+
+		/* Now we play with iterators. */
+
+		{
+			java.util.ListIterator i, j;
+			Object J;
+			Map.Entry E, F;
+			i = (java.util.ListIterator)m.entrySet().iterator(); 
+			j = new java.util.LinkedList( t.entrySet() ).listIterator(); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + seed + "): divergence in hasNext()" );
+				ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + seed + "): divergence in hasPrevious()" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+#ifdef Custom
+					ensure( m.strategy().equals( (E=(java.util.Map.Entry)i.next()).getKey(),  J = (F=(Map.Entry)j.next()).getKey() ), "Error (" + seed + "): divergence in next()" );
+#else
+					ensure( (E=(java.util.Map.Entry)i.next()).getKey().equals( J = (F=(Map.Entry)j.next()).getKey() ), "Error (" + seed + "): divergence in next()" );
+#endif
+
+					if ( r.nextFloat() < 0.3 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+					else if ( r.nextFloat() < 0.3 ) {
+						Object U = VALUE2OBJ(genValue());
+						E.setValue( U );
+						t.put( F.getKey(), U );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+#ifdef Custom
+					ensure( m.strategy().equals( (E=(java.util.Map.Entry)i.previous()).getKey(), J = (F=(Map.Entry)j.previous()).getKey() ), "Error (" + seed + "): divergence in previous()" );
+#else
+					ensure( (E=(java.util.Map.Entry)i.previous()).getKey().equals( J = (F=(Map.Entry)j.previous()).getKey() ), "Error (" + seed + "): divergence in previous()" );
+#endif
+
+					if ( r.nextFloat() < 0.3 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+					else if ( r.nextFloat() < 0.3 ) {
+						Object U = VALUE2OBJ(genValue());
+						E.setValue( U );
+						t.put( F.getKey(), U );
+					}
+				}
+
+				ensure( i.nextIndex() == j.nextIndex(), "Error (" + seed + "): divergence in nextIndex()" );
+				ensure( i.previousIndex() == j.previousIndex(), "Error (" + seed + "): divergence in previousIndex()" );
+
+			}
+
+		}
+		  
+		if ( t.size() > 0 ) {
+			java.util.ListIterator i, j;
+			Object J;
+			j = new java.util.LinkedList( t.keySet() ).listIterator();
+			int e = r.nextInt( t.size() );
+			Object from;
+			do from = j.next(); while( e-- != 0 );
+
+			i = (java.util.ListIterator)((SORTED_SET)m.keySet()).iterator( KEY_OBJ2TYPE( from ) ); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" );
+				ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+#ifdef Custom
+					ensure( m.strategy().equals( i.next(), J = j.next() ), "Error (" + seed + "): divergence in next() (iterator with starting point " + from + ")" );
+#else
+					ensure( i.next().equals( J = j.next() ), "Error (" + seed + "): divergence in next() (iterator with starting point " + from + ")" );
+#endif
+
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+#ifdef Custom
+					ensure( m.strategy().equals( i.previous(), J = j.previous() ), "Error (" + seed + "): divergence in previous() (iterator with starting point " + from + ")" );
+#else
+					ensure( i.previous().equals( J = j.previous() ), "Error (" + seed + "): divergence in previous() (iterator with starting point " + from + ")" );
+#endif
+
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+
+				ensure( i.nextIndex() == j.nextIndex(), "Error (" + seed + "): divergence in nextIndex() (iterator with starting point " + from + ")" );
+				ensure( i.previousIndex() == j.previousIndex(), "Error (" + seed + "): divergence in previousIndex() (iterator with starting point " + from + ")" );
+
+			}
+
+		}
+		  
+
+
+		/* Now we check that m actually holds that data. */
+		  
+		ensure( m.equals(t), "Error (" + seed + "): ! m.equals( t ) after iteration" );
+		ensure( t.equals(m), "Error (" + seed + "): ! t.equals( m ) after iteration" );
+
+#endif
+
+
+		/* Now we take out of m everything, and check that it is empty. */
+
+		for(java.util.Iterator i=t.keySet().iterator(); i.hasNext(); ) m.remove(i.next()); 
+
+		if (!m.isEmpty())  {
+			System.out.println("Error (" + seed + "): m is not empty (as it should be)");
+			System.exit( 1 );
+		}
+		
+#ifdef NumericEnhancements
+#if #valueclass(Byte) || #valueclass(Character) || #valueclass(Short) || #valueclass(Integer) || #valueclass(Long)
+		/* Now we check that increment works properly, using random data */
+		
+		{
+			t.clear();
+			m.clear();
+			
+			for( int k = 0; k < 2*n; k++ ) {
+				KEY_TYPE T = genKey();
+				VALUE_TYPE U = genValue();
+				
+				VALUE_TYPE rU = m.increment(T, U);
+				VALUE_GENERIC_CLASS tU = (VALUE_GENERIC_CLASS) t.get(KEY2OBJ(T));
+				if (null == tU) {
+					ensure(m.defaultReturnValue() == rU, "Error (" + seed + "): map increment does not return proper starting value." );
+					t.put( KEY2OBJ(T), VALUE2OBJ((VALUE_TYPE) (m.defaultReturnValue() + U)) );
+				}
+				else {
+					t.put( KEY2OBJ(T), VALUE2OBJ((VALUE_TYPE) (((VALUE_TYPE) tU) + U)) );
+				}
+			}
+			
+			// Maps should contain identical values
+			ensure( new java.util.HashMap(m).equals(new java.util.HashMap(t)),
+				"Error(" + seed + "): incremented maps are not equal." );
+		}
+#endif
+#endif
+		
+
+#if (#keyclass(Integer) || #keyclass(Long)) && (#valueclass(Integer) || #valueclass(Long))
+		m = new OPEN_HASH_MAP(n, f);
+		t.clear();
+		int x;
+
+		/* Now we torture-test the hash table. This part is implemented only for integers and longs. */
+
+		int p = m.used.length;
+
+		for(int i=0; i<p; i++) {
+			for (int j=0; j<20; j++) {
+				m.put(i+(r.nextInt() % 10)*p, 1);
+				m.remove(i+(r.nextInt() % 10)*p);
+			}
+
+			for (int j=-10; j<10; j++) m.remove(i+j*p);
+		}
+		  
+		t.putAll(m);
+
+		/* Now all table entries are REMOVED. */
+
+		for(int i=0; i<(p*f)/10; i++) {
+			for (int j=0; j<10; j++) {
+				if (!valEquals(m.put(KEY2OBJ(x = i+(r.nextInt() % 10)*p), VALUE2OBJ(1)), t.put(KEY2OBJ(x), VALUE2OBJ(1))))
+					System.out.println("Error (" + seed + "): m and t differ on an entry during torture-test insertion.");
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after torture-test insertion");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after torture-test insertion");
+
+		for(int i=0; i<p/10; i++) {
+			for (int j=0; j<10; j++) {
+				if (!valEquals(m.remove(KEY2OBJ(x = i+(r.nextInt() % 10)*p)), t.remove(KEY2OBJ(x))))
+					System.out.println("Error (" + seed + "): m and t differ on an entry during torture-test removal.");
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after torture-test removal");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after torture-test removal");
+
+		if (!m.equals(m.clone())) System.out.println("Error (" + seed + "): !m.equals(m.clone()) after torture-test removal");
+		if (!((OPEN_HASH_MAP)m.clone()).equals(m)) System.out.println("Error (" + seed + "): !m.clone().equals(m) after torture-test removal");
+
+		m.rehash();
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after rehash()");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after rehash()");
+
+		m.trim();
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after trim()");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after trim()");
+#endif
+
+		System.out.println("Test OK");
+		return;
+
+#endif
+	}
+
+
+	public static void main( String args[] ) {
+		float f = Hash.DEFAULT_LOAD_FACTOR;
+		int n  = Integer.parseInt(args[1]);
+		if (args.length>2) f = Float.parseFloat(args[2]);
+		if ( args.length > 3 ) r = new java.util.Random( seed = Long.parseLong( args[ 3 ] ) );
+		  
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, f, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n, f);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+			
+	}
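+
+	/* Invocation sketch (illustrative; the actual class name depends on the generated
+	   type, and the arguments follow the parsing in main() above):
+
+	       java <GeneratedTestClass> test 1000 0.75 42
+
+	   i.e. args = { mode ("test", "speedTest" or "speedComp"), n, load factor (optional), seed (optional) }. */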
+
+#endif
+
+}
diff --git a/drv/OpenHashSet.drv b/drv/OpenHashSet.drv
new file mode 100644
index 0000000..261bba1
--- /dev/null
+++ b/drv/OpenHashSet.drv
@@ -0,0 +1,2271 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.Hash;
+import it.unimi.dsi.fastutil.HashCommon;
+import it.unimi.dsi.fastutil.booleans.BooleanArrays;
+import static it.unimi.dsi.fastutil.HashCommon.arraySize;
+import static it.unimi.dsi.fastutil.HashCommon.maxFill;
+
+import java.util.Collection;
+#if #keys(primitive)
+import java.util.Iterator;
+#endif
+import java.util.NoSuchElementException;
+
+#ifdef Linked
+
+#if #keys(reference)
+import java.util.Comparator;
+#endif
+
+/**  A type-specific linked hash set with a fast, small-footprint implementation.
+ *
+ * <P>Instances of this class use a hash table to represent a set. The table is
+ * enlarged as needed by doubling its size when new entries are created, but it is <em>never</em> made
+ * smaller (even on a {@link #clear()}). A family of {@linkplain #trim() trimming
+ * methods} lets you control the size of the table; this is particularly useful
+ * if you reuse instances of this class.
+ *
+ * <P>Iterators generated by this set will enumerate elements in the same order in which they
+ * have been added to the set (addition of elements already present 
+ * in the set does not change the iteration order). Note that this order has nothing in common with the natural
+ * order of the keys. The order is kept by means of a doubly linked list, represented
+ * <i>via</i> an array of longs parallel to the table.
+ *
+ * <P>This class implements the interface of a sorted set so as to allow easy
+ * access to the iteration order: for instance, you can get the first element
+ * in iteration order with {@link #first()} without having to create an
+ * iterator; however, this class partially violates the {@link java.util.SortedSet}
+ * contract because all subset methods throw an exception and {@link
+ * #comparator()} always returns <code>null</code>.
+ *
+ * <p>Additional methods, such as <code>addAndMoveToFirst()</code>, make it easy
+ * to use instances of this class as a cache (e.g., with LRU policy).
+ *
+ * <P>The iterators provided by this class are type-specific {@linkplain
+ * java.util.ListIterator list iterators}, and can be started at any
+ * element <em>which is in the set</em> (if the provided element 
+ * is not in the set, a {@link NoSuchElementException} will be thrown).
+ * If, however, the provided element is not the first or last element in the
+ * set, the first access to the list index will require linear time, as in the worst case
+ * the entire set must be scanned in iteration order to retrieve the positional
+ * index of the starting element. If you use just the methods of a type-specific {@link it.unimi.dsi.fastutil.BidirectionalIterator},
+ * however, all operations will be performed in constant time.
+ *
+ * @see Hash
+ * @see HashCommon
+ */
+
+public class OPEN_HASH_SET KEY_GENERIC extends ABSTRACT_SORTED_SET KEY_GENERIC implements java.io.Serializable, Cloneable, Hash {
+
+#else
+
+#ifdef Custom
+
+/** A type-specific hash set with a fast, small-footprint implementation whose {@linkplain it.unimi.dsi.fastutil.Hash.Strategy hashing strategy}
+ * is specified at creation time.
+ *
+ * <P>Instances of this class use a hash table to represent a set. The table is
+ * enlarged as needed by doubling its size when new entries are created, but it is <em>never</em> made
+ * smaller (even on a {@link #clear()}). A family of {@linkplain #trim() trimming
+ * methods} lets you control the size of the table; this is particularly useful
+ * if you reuse instances of this class.
+ *
+ * <p><strong>Warning:</strong> The implementation of this class has significantly
+ * changed in <code>fastutil</code> 6.1.0. Please read the
+ * comments about this issue in the section “Faster Hash Tables” of the <a href="../../../../../overview-summary.html">overview</a>.
+ *
+ * @see Hash
+ * @see HashCommon
+ */
+
+public class OPEN_HASH_SET KEY_GENERIC extends ABSTRACT_SET KEY_GENERIC implements java.io.Serializable, Cloneable, Hash {
+
+#else
+
+/**  A type-specific hash set with a fast, small-footprint implementation.
+ *
+ * <P>Instances of this class use a hash table to represent a set. The table is
+ * enlarged as needed by doubling its size when new entries are created, but it is <em>never</em> made
+ * smaller (even on a {@link #clear()}). A family of {@linkplain #trim() trimming
+ * methods} lets you control the size of the table; this is particularly useful
+ * if you reuse instances of this class.
+ *
+ * <p><strong>Warning:</strong> The implementation of this class has significantly
+ * changed in <code>fastutil</code> 6.1.0. Please read the
+ * comments about this issue in the section “Faster Hash Tables” of the <a href="../../../../../overview-summary.html">overview</a>.
+ *
+ * @see Hash
+ * @see HashCommon
+ */
+
+public class OPEN_HASH_SET KEY_GENERIC extends ABSTRACT_SET KEY_GENERIC implements java.io.Serializable, Cloneable, Hash {
+
+#endif
+
+#endif
+
+    private static final long serialVersionUID = 0L;
+	private static final boolean ASSERTS = ASSERTS_VALUE;
+
+	/** The array of keys. */
+	protected transient KEY_GENERIC_TYPE key[];
+	 
+	/** The array telling whether a position is used. */
+	protected transient boolean used[];
+
+	/** The acceptable load factor. */
+	protected final float f;
+	 
+	/** The current table size. */
+	protected transient int n;
+
+	/** Threshold after which we rehash. It must be the table size times {@link #f}. */
+	protected transient int maxFill;
+
+	/** The mask for wrapping a position counter. */
+	protected transient int mask;
+
+	/** Number of entries in the set. */
+	protected int size;
+
+#ifdef Linked
+	/** The index of the first entry in iteration order. It is valid iff {@link #size} is nonzero; otherwise, it contains -1. */
+	protected transient int first = -1;
+	/** The index of the last entry in iteration order. It is valid iff {@link #size} is nonzero; otherwise, it contains -1. */
+	protected transient int last = -1;
+	/** For each entry, the next and the previous entry in iteration order,
+     * stored as <code>((prev & 0xFFFFFFFFL) << 32) | (next & 0xFFFFFFFFL)</code>.
+     * The first entry contains predecessor -1, and the last entry 
+     * contains successor -1. */
+	protected transient long link[];
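+
+	/* Illustrative sketch (not part of the generated class): how a prev/next pair is
+	   packed into and unpacked from one of the longs above, following the encoding
+	   documented on this field; the helper names are hypothetical.
+
+	   static long pack( final int prev, final int next ) {
+	       return ( ( prev & 0xFFFFFFFFL ) << 32 ) | ( next & 0xFFFFFFFFL );
+	   }
+	   static int prevOf( final long link ) { return (int)( link >>> 32 ); }
+	   static int nextOf( final long link ) { return (int)link; }
+	   // pack( -1, -1 ) == -1L: a single entry has both predecessor and successor -1.
+	*/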
+#endif
+
+#ifdef Custom
+	/** The hash strategy of this custom set. */
+	protected STRATEGY KEY_GENERIC strategy;
+#endif
+
+#ifdef Custom
+	/** Creates a new hash set.
+	 *
+	 * <p>The actual table size will be the least power of two greater than <code>expected</code>/<code>f</code>.
+	 *
+	 * @param expected the expected number of elements in the hash set. 
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 */
+	@SuppressWarnings("unchecked")
+	public OPEN_HASH_SET( final int expected, final float f, final STRATEGY KEY_GENERIC strategy ) {
+		this.strategy = strategy;
+#else
+	/** Creates a new hash set.
+	 *
+	 * <p>The actual table size will be the least power of two greater than <code>expected</code>/<code>f</code>.
+	 *
+	 * @param expected the expected number of elements in the hash set. 
+	 * @param f the load factor.
+	 */
+	@SuppressWarnings("unchecked")
+	public OPEN_HASH_SET( final int expected, final float f ) {
+#endif
+		if ( f <= 0 || f > 1 ) throw new IllegalArgumentException( "Load factor must be greater than 0 and smaller than or equal to 1" );
+		if ( expected < 0 ) throw new IllegalArgumentException( "The expected number of elements must be nonnegative" );
+
+		this.f = f;
+
+		n = arraySize( expected, f );
+		mask = n - 1;
+		maxFill = maxFill( n, f );
+		key = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ n ];
+		used = new boolean[ n ];
+#ifdef Linked
+		link = new long[ n ];
+#endif
+	}
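+
+	/* A worked example of the sizing above, assuming expected = 100 and f = 0.75f
+	   (exact values are delegated to HashCommon.arraySize() and HashCommon.maxFill()):
+
+	       n       = arraySize( 100, 0.75f )  // 256, the least power of two greater than ceil( 100 / 0.75 ) = 134
+	       mask    = 255
+	       maxFill = maxFill( 256, 0.75f )    // 192; once size reaches this value, the table is doubled
+	*/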
+	 
+	 
+#ifdef Custom
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
+	 *
+	 * @param expected the expected number of elements in the hash set. 
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_SET( final int expected, final STRATEGY KEY_GENERIC strategy ) {
+		this( expected, DEFAULT_LOAD_FACTOR, strategy );
+	}
+
+#else
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
+	 *
+	 * @param expected the expected number of elements in the hash set. 
+	 */
+	 
+	public OPEN_HASH_SET( final int expected ) {
+		this( expected, DEFAULT_LOAD_FACTOR );
+	}
+
+#endif
+
+
+#ifdef Custom
+	/** Creates a new hash set with initial expected {@link Hash#DEFAULT_INITIAL_SIZE} elements
+	 * and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_SET( final STRATEGY KEY_GENERIC strategy ) {
+		this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash set with initial expected {@link Hash#DEFAULT_INITIAL_SIZE} elements
+	 * and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
+	 */
+	 
+	public OPEN_HASH_SET() {
+		this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR );
+	} 
+#endif
+
+
+#ifdef Custom
+	/** Creates a new hash set copying a given collection.
+	 *
+	 * @param c a {@link Collection} to be copied into the new hash set. 
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_SET( final Collection<? extends KEY_GENERIC_CLASS> c, final float f, final STRATEGY KEY_GENERIC strategy ) {
+		this( c.size(), f, strategy );
+		addAll( c );
+	}
+#else
+	/** Creates a new hash set copying a given collection.
+	 *
+	 * @param c a {@link Collection} to be copied into the new hash set. 
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_HASH_SET( final Collection<? extends KEY_GENERIC_CLASS> c, final float f ) {
+		this( c.size(), f );
+		addAll( c );
+	}
+#endif
+
+
+
+#ifdef Custom
+	/** Creates a new hash set  with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor 
+	 * copying a given collection.
+	 *
+	 * @param c a {@link Collection} to be copied into the new hash set. 
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_SET( final Collection<? extends KEY_GENERIC_CLASS> c, final STRATEGY KEY_GENERIC strategy ) {
+		this( c, DEFAULT_LOAD_FACTOR, strategy );
+	}
+
+#else
+	/** Creates a new hash set  with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor 
+	 * copying a given collection.
+	 *
+	 * @param c a {@link Collection} to be copied into the new hash set. 
+	 */
+	 
+	public OPEN_HASH_SET( final Collection<? extends KEY_GENERIC_CLASS> c ) {
+		this( c, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+
+#ifdef Custom
+	/** Creates a new hash set copying a given type-specific collection.
+	 *
+	 * @param c a type-specific collection to be copied into the new hash set. 
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_SET( final COLLECTION KEY_EXTENDS_GENERIC c, final float f, STRATEGY KEY_GENERIC strategy ) {
+		this( c.size(), f, strategy );
+		addAll( c );
+	}
+#else
+	/** Creates a new hash set copying a given type-specific collection.
+	 *
+	 * @param c a type-specific collection to be copied into the new hash set. 
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_HASH_SET( final COLLECTION KEY_EXTENDS_GENERIC c, final float f ) {
+		this( c.size(), f );
+		addAll( c );
+	}
+#endif
+
+
+#ifdef Custom
+	/** Creates a new hash set  with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor 
+	 * copying a given type-specific collection.
+	 *
+	 * @param c a type-specific collection to be copied into the new hash set. 
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_SET( final COLLECTION KEY_EXTENDS_GENERIC c, final STRATEGY KEY_GENERIC strategy ) {
+		this( c, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash set  with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor 
+	 * copying a given type-specific collection.
+	 *
+	 * @param c a type-specific collection to be copied into the new hash set. 
+	 */
+	 
+	public OPEN_HASH_SET( final COLLECTION KEY_EXTENDS_GENERIC c ) {
+		this( c, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+
+#ifdef Custom
+	/** Creates a new hash set using elements provided by a type-specific iterator.
+	 *
+	 * @param i a type-specific iterator whose elements will fill the set.
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_SET( final KEY_ITERATOR KEY_GENERIC i, final float f, final STRATEGY KEY_GENERIC strategy ) {
+		this( DEFAULT_INITIAL_SIZE, f, strategy );
+		while( i.hasNext() ) add( i.NEXT_KEY() );
+	}
+#else
+	/** Creates a new hash set using elements provided by a type-specific iterator.
+	 *
+	 * @param i a type-specific iterator whose elements will fill the set.
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_HASH_SET( final KEY_ITERATOR KEY_GENERIC i, final float f ) {
+		this( DEFAULT_INITIAL_SIZE, f );
+		while( i.hasNext() ) add( i.NEXT_KEY() );
+	}
+#endif
+
+#ifdef Custom
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by a type-specific iterator.
+	 *
+	 * @param i a type-specific iterator whose elements will fill the set.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_SET( final KEY_ITERATOR KEY_GENERIC i, final STRATEGY KEY_GENERIC strategy ) {
+		this( i, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by a type-specific iterator.
+	 *
+	 * @param i a type-specific iterator whose elements will fill the set.
+	 */
+	 
+	public OPEN_HASH_SET( final KEY_ITERATOR KEY_GENERIC i ) {
+		this( i, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+
+
+#if #keys(primitive)
+
+#ifdef Custom
+	/** Creates a new hash set using elements provided by an iterator.
+	 *
+	 * @param i an iterator whose elements will fill the set.
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_SET( final Iterator<?> i, final float f, final STRATEGY KEY_GENERIC strategy ) {
+		this( ITERATORS.AS_KEY_ITERATOR( i ), f, strategy );
+	}
+#else
+	/** Creates a new hash set using elements provided by an iterator.
+	 *
+	 * @param i an iterator whose elements will fill the set.
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_HASH_SET( final Iterator<?> i, final float f ) {
+		this( ITERATORS.AS_KEY_ITERATOR( i ), f );
+	}
+#endif
+
+#ifdef Custom
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by an iterator.
+	 *
+	 * @param i an iterator whose elements will fill the set.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_SET( final Iterator<?> i, final STRATEGY KEY_GENERIC strategy ) {
+		this( ITERATORS.AS_KEY_ITERATOR( i ), strategy );
+	}
+#else
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by an iterator.
+	 *
+	 * @param i an iterator whose elements will fill the set.
+	 */
+	 
+	public OPEN_HASH_SET( final Iterator<?> i ) {
+		this( ITERATORS.AS_KEY_ITERATOR( i ) );
+	}
+#endif
+
+#endif
+
+
+
+#ifdef Custom
+	/** Creates a new hash set and fills it with the elements of a given array.
+	 *
+	 * @param a an array whose elements will be used to fill the set.
+	 * @param offset the first element to use.
+	 * @param length the number of elements to use.
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length, final float f, final STRATEGY KEY_GENERIC strategy ) {
+		this( length < 0 ? 0 : length, f, strategy );
+		ARRAYS.ensureOffsetLength( a, offset, length );
+		for( int i = 0; i < length; i++ ) add( a[ offset + i ] );
+	}
+#else
+	/** Creates a new hash set and fills it with the elements of a given array.
+	 *
+	 * @param a an array whose elements will be used to fill the set.
+	 * @param offset the first element to use.
+	 * @param length the number of elements to use.
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_HASH_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length, final float f ) {
+		this( length < 0 ? 0 : length, f );
+		ARRAYS.ensureOffsetLength( a, offset, length );
+		for( int i = 0; i < length; i++ ) add( a[ offset + i ] );
+	}
+#endif
+
+#ifdef Custom
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor and fills it with the elements of a given array.
+	 *
+	 * @param a an array whose elements will be used to fill the set.
+	 * @param offset the first element to use.
+	 * @param length the number of elements to use.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length, final STRATEGY KEY_GENERIC strategy ) {
+		this( a, offset, length, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor and fills it with the elements of a given array.
+	 *
+	 * @param a an array whose elements will be used to fill the set.
+	 * @param offset the first element to use.
+	 * @param length the number of elements to use.
+	 */
+	 
+	public OPEN_HASH_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length ) {
+		this( a, offset, length, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+#ifdef Custom
+	/** Creates a new hash set copying the elements of an array.
+	 *
+	 * @param a an array to be copied into the new hash set. 
+	 * @param f the load factor.
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_SET( final KEY_GENERIC_TYPE[] a, final float f, final STRATEGY KEY_GENERIC strategy ) {
+		this( a, 0, a.length, f, strategy );
+	}
+#else
+	/** Creates a new hash set copying the elements of an array.
+	 *
+	 * @param a an array to be copied into the new hash set. 
+	 * @param f the load factor.
+	 */
+	 
+	public OPEN_HASH_SET( final KEY_GENERIC_TYPE[] a, final float f ) {
+		this( a, 0, a.length, f );
+	}
+#endif
+
+#ifdef Custom
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor 
+	 * copying the elements of an array.
+	 *
+	 * @param a an array to be copied into the new hash set. 
+	 * @param strategy the strategy.
+	 */
+	 
+	public OPEN_HASH_SET( final KEY_GENERIC_TYPE[] a, final STRATEGY KEY_GENERIC strategy ) {
+		this( a, DEFAULT_LOAD_FACTOR, strategy );
+	}
+#else
+	/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor 
+	 * copying the elements of an array.
+	 *
+	 * @param a an array to be copied into the new hash set. 
+	 */
+	 
+	public OPEN_HASH_SET( final KEY_GENERIC_TYPE[] a ) {
+		this( a, DEFAULT_LOAD_FACTOR );
+	}
+#endif
+
+
+#ifdef Custom
+	/** Returns the hashing strategy.
+	 *
+	 * @return the hashing strategy of this custom hash set.
+	 */
+
+	public STRATEGY KEY_GENERIC strategy() {
+		return strategy;
+	}
+#endif
+
+	/*
+	 * The following methods implement some basic building blocks used by
+	 * all accessors. They are (and should be maintained) identical to those used in HashMap.drv.
+	 */
+
+	public boolean add( final KEY_GENERIC_TYPE k ) {
+		// The starting point.
+		int pos = KEY2INTHASH( k ) & mask;
+
+		// There's always an unused entry.
+		while( used[ pos ] ) {
+			if ( KEY_EQUALS( key[ pos ], k ) ) return false;
+			pos = ( pos + 1 ) & mask;
+		}
+
+		used[ pos ] = true;
+		key[ pos ] = k;
+
+#ifdef Linked
+		if ( size == 0 ) {
+			first = last = pos;
+			// Special case of SET(link[ pos ], -1, -1);
+			link[ pos ] = -1L;
+		}
+		else {
+			SET_NEXT( link[ last ], pos );
+			SET_UPPER_LOWER( link[ pos ], last, -1 );
+			last = pos;
+		}
+#endif
+
+		if ( ++size >= maxFill ) rehash( arraySize( size + 1, f ) );
+		if ( ASSERTS ) checkTable();
+		return true;
+	}
+
+	/** Shifts left entries with the specified hash code, starting at the specified position,
+	 * and empties the resulting free entry.
+	 *
+	 * @param pos a starting position.
+	 * @return the position cleared by the shifting process.
+	 */
+	protected final int shiftKeys( int pos ) {
+		// Shift entries with the same hash.
+		int last, slot;
+
+		for(;;) {
+			pos = ( ( last = pos ) + 1 ) & mask;
+			
+			while( used[ pos ] ) {
+				slot = KEY2INTHASH( key[ pos ] ) & mask;
+				if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
+				pos = ( pos + 1 ) & mask;
+			}
+
+			if ( ! used[ pos ] ) break;
+
+			key[ last ] = key[ pos ];			
+
+#ifdef Linked
+			fixPointers( pos, last );
+#endif
+		}
+
+		used[ last ] = false;
+#if #keys(reference)
+		key[ last ] = null;
+#endif
+		return last;
+	}
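+
+	/* Note on the wrap-around test inside shiftKeys() above: an entry found at pos, whose
+	   preferred slot is KEY2INTHASH( key[ pos ] ) & mask, is moved back into the hole at
+	   last only if that slot does not lie circularly in ( last, pos ]; otherwise it is left
+	   in place and the scan continues. For example, with mask = 7, last = 6 and pos = 1:
+	       slot = 7  ->  7 lies in ( 6, 1 ] = { 7, 0, 1 }, so the entry is skipped;
+	       slot = 5  ->  5 does not lie in ( 6, 1 ],       so key[ 1 ] is moved to key[ 6 ]. */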
+
+
+	@SuppressWarnings("unchecked")
+	public boolean remove( final KEY_TYPE k ) {
+		// The starting point.
+		int pos = KEY2INTHASH( k ) & mask;
+
+		// There's always an unused entry.
+		while( used[ pos ] ) {
+			if ( KEY_EQUALS( key[ pos ], k ) ) {
+				size--;
+#ifdef Linked
+				fixPointers( pos );
+#endif
+				shiftKeys( pos );
+				if ( ASSERTS ) checkTable();
+				return true;
+			}
+			pos = ( pos + 1 ) & mask;
+		}
+		return false;
+	}
+	 
+	@SuppressWarnings("unchecked")
+	public boolean contains( final KEY_TYPE k ) {
+		// The starting point.
+		int pos = KEY2INTHASH( k ) & mask;
+
+		// There's always an unused entry.
+		while( used[ pos ] ) {
+			if ( KEY_EQUALS( key[ pos ], k ) ) return true;
+			pos = ( pos + 1 ) & mask;
+		}
+		return false;
+	}
+
+#if #keyclass(Object)
+	/** Returns the element of this set that is equal to the given key, or <code>null</code>.
+	 * @return the element of this set that is equal to the given key, or <code>null</code>.
+	 */
+	@SuppressWarnings("unchecked")
+	public K get( final Object k ) {
+		// The starting point.
+		int pos = KEY2INTHASH( k ) & mask;
+
+		// There's always an unused entry.
+		while( used[ pos ] ) {
+			if ( KEY_EQUALS( key[ pos ], k ) ) return key[ pos ];
+			pos = ( pos + 1 ) & mask;
+		}
+		return null;
+	}
+#endif
+
+#ifdef Linked
+	
+	/** Removes the first key in iteration order.
+	 * @return the first key.
+	 * @throws NoSuchElementException if this set is empty.
+	 */
+	public KEY_GENERIC_TYPE REMOVE_FIRST_KEY() {
+		if ( size == 0 ) throw new NoSuchElementException();
+		--size;
+		final int pos = first;
+		// Abbreviated version of fixPointers(pos)
+		first = GET_NEXT(link[ pos ]);
+		if ( 0 <= first ) {
+			// Special case of SET_PREV( link[ first ], -1 )
+			link[ first ] |= (-1 & 0xFFFFFFFFL) << 32;
+		}
+		final KEY_GENERIC_TYPE k = key[ pos ];
+		shiftKeys( pos );	
+		return k;
+	}
+
+	/** Removes the last key in iteration order.
+	 * @return the last key.
+	 * @throws NoSuchElementException if this set is empty.
+	 */
+	public KEY_GENERIC_TYPE REMOVE_LAST_KEY() {
+		if ( size == 0 ) throw new NoSuchElementException();
+		--size;
+		final int pos = last;
+		// Abbreviated version of fixPointers(pos)
+		last = GET_PREV(link[ pos ]);
+		if ( 0 <= last ) {
+			// Special case of SET_NEXT( link[ last ], -1 )
+			link[ last ] |= -1 & 0xFFFFFFFFL;
+		}
+		final KEY_GENERIC_TYPE k = key[ pos ];
+		shiftKeys( pos );
+		return k;
+	}
+
+	private void moveIndexToFirst( final int i ) {
+		if ( size == 1 || first == i ) return;
+		if ( last == i ) {
+			last = GET_PREV(link[ i ]);
+			// Special case of SET_NEXT( link[ last ], -1 );
+			link[ last ] |= -1 & 0xFFFFFFFFL;
+		}
+		else {
+			final long linki = link[ i ];
+			final int prev = GET_PREV(linki);
+			final int next = GET_NEXT(linki);
+			COPY_NEXT(link[ prev ], linki);
+			COPY_PREV(link[ next ], linki);
+		}
+		SET_PREV( link[ first ], i );
+		SET_UPPER_LOWER( link[ i ], -1, first );
+		first = i;
+	}
+
+	private void moveIndexToLast( final int i ) {
+		if ( size == 1 ||  last == i ) return;
+		if ( first == i ) {
+			first = GET_NEXT(link[ i ]);
+			// Special case of SET_PREV( link[ first ], -1 );
+			link[ first ] |= (-1 & 0xFFFFFFFFL) << 32;
+		}
+		else {
+			final long linki = link[ i ];
+			final int prev = GET_PREV(linki);
+			final int next = GET_NEXT(linki);
+			COPY_NEXT(link[ prev ], linki);
+			COPY_PREV(link[ next ], linki);
+		}
+		SET_NEXT( link[ last ], i );
+		SET_UPPER_LOWER( link[ i ], last, -1 );
+		last = i;
+	}
+
+	/** Adds a key to the set; if the key is already present, it is moved to the first position of the iteration order.
+	 *
+	 * @param k the key.
+	 * @return true if the key was not present.
+	 */
+	public boolean addAndMoveToFirst( final KEY_GENERIC_TYPE k ) {
+		final KEY_GENERIC_TYPE key[] = this.key;
+		final boolean used[] = this.used;
+		final int mask = this.mask;
+
+		// The starting point.
+		int pos = KEY2INTHASH( k ) & mask;
+
+		// There's always an unused entry.
+		while( used[ pos ] ) {
+			if ( KEY_EQUALS( k, key[ pos ] ) ) {
+				moveIndexToFirst( pos );
+				return false;
+			}
+			
+			pos = ( pos + 1 ) & mask;
+		}
+		  
+		used[ pos ] = true;
+		key[ pos ] = k;
+		
+		if ( size == 0 ) {
+			first = last = pos;
+			// Special case of SET_UPPER_LOWER( link[ pos ], -1, -1 );
+			link[ pos ] = -1L;
+		}
+		else {
+			SET_PREV( link[ first ], pos );
+			SET_UPPER_LOWER( link[ pos ], -1, first );
+			first = pos;
+		}
+
+		if ( ++size >= maxFill ) rehash( arraySize( size, f ) );
+		if ( ASSERTS ) checkTable();
+		return true;
+	}
+
+	/** Adds a key to the set; if the key is already present, it is moved to the last position of the iteration order.
+	 *
+	 * @param k the key.
+	 * @return true if the key was not present.
+	 */
+	public boolean addAndMoveToLast( final KEY_GENERIC_TYPE k ) {
+		final KEY_GENERIC_TYPE key[] = this.key;
+		final boolean used[] = this.used;
+		final int mask = this.mask;
+
+		// The starting point.
+		int pos = KEY2INTHASH( k ) & mask;
+
+		// There's always an unused entry.
+		while( used[ pos ] ) {
+			if ( KEY_EQUALS( k, key[ pos ] ) ) {
+				moveIndexToLast( pos );
+				return false;
+			}
+			
+			pos = ( pos + 1 ) & mask;
+		}
+		  
+		used[ pos ] = true;
+		key[ pos ] = k;
+		
+		if ( size == 0 ) {
+			first = last = pos;
+			// Special case of SET_UPPER_LOWER( link[ pos ], -1, -1 );
+			link[ pos ] = -1L;
+		}
+		else {
+			SET_NEXT( link[ last ], pos );
+			SET_UPPER_LOWER( link[ pos ], last, -1 );
+			last = pos;
+		}
+
+		if ( ++size >= maxFill ) rehash( arraySize( size, f ) );
+		if ( ASSERTS ) checkTable();
+		return true;
+	}
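+
+	/* Usage sketch for the two methods above, with a concrete instantiation of this
+	   template (it.unimi.dsi.fastutil.longs.LongLinkedOpenHashSet) standing in for the
+	   generated class; `touch` and `maxSize` are hypothetical names.
+
+	   final LongLinkedOpenHashSet recent = new LongLinkedOpenHashSet();
+	   void touch( final long k, final int maxSize ) {
+	       recent.addAndMoveToLast( k );                             // most recently used key goes last
+	       if ( recent.size() > maxSize ) recent.removeFirstLong();  // evict the least recently used key
+	   }
+	*/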
+
+#endif
+
+	/** Removes all elements from this set.
+	 *
+	 * <P>To increase object reuse, this method does not change the table size.
+	 * If you want to reduce the table size, you must use {@link #trim()}.
+	 *
+	 */
+
+	public void clear() {
+		if ( size == 0 ) return;
+		size = 0;
+		BooleanArrays.fill( used, false );
+
+#if #keys(reference)
+		ObjectArrays.fill( key, null );
+#endif
+#ifdef Linked
+		first = last = -1;
+#endif
+	}
+
+
+	public int size() {
+		return size;
+	}
+
+	public boolean isEmpty() {
+		return size == 0;
+	}
+
+	/** A no-op for backward compatibility.
+	 * 
+	 * @param growthFactor unused.
+	 * @deprecated Since <code>fastutil</code> 6.1.0, hash tables are doubled when they are too full.
+	 */
+	@Deprecated
+	public void growthFactor( int growthFactor ) {}
+
+
+	/** Gets the growth factor.
+	 *
+	 * @return an unused legacy value, kept only for backward compatibility.
+	 * @see #growthFactor(int)
+	 * @deprecated Since <code>fastutil</code> 6.1.0, hash tables are doubled when they are too full.
+	 */
+	@Deprecated
+	public int growthFactor() {
+		return 16;
+	}
+
+
+
+#ifdef Linked
+
+	/** Modifies the {@link #link} vector so that the given entry is removed.
+	 *
+	 * <P>This method completes in constant time.
+	 *
+	 * @param i the index of an entry. 
+	 */
+	protected void fixPointers( final int i ) {
+		if ( size == 0 ) {
+			first = last = -1;
+			return;
+		}
+		if ( first == i ) {
+			first = GET_NEXT(link[ i ]);
+			if (0 <= first) {
+				// Special case of SET_PREV( link[ first ], -1 )
+				link[ first ] |= (-1 & 0xFFFFFFFFL) << 32;
+			}
+			return;
+		}
+		if ( last == i ) {
+			last = GET_PREV(link[ i ]);
+			if (0 <= last) {
+				// Special case of SET_NEXT( link[ last ], -1 )
+				link[ last ] |= -1 & 0xFFFFFFFFL;
+			}
+			return;
+		}
+		final long linki = link[ i ];
+		final int prev = GET_PREV(linki);
+		final int next = GET_NEXT(linki);
+		COPY_NEXT(link[ prev ], linki);
+		COPY_PREV(link[ next ], linki);
+	}
+
+
+	/** Modifies the {@link #link} vector for a shift from s to d.
+	 *
+	 * <P>This method completes in constant time.
+	 *
+	 * @param s the source position.
+	 * @param d the destination position.
+	 */
+	protected void fixPointers( int s, int d ) {
+		if ( size == 1 ) {
+			first = last = d;
+			// Special case of SET(link[ d ], -1, -1)
+			link[ d ] = -1L;
+			return;
+		}
+		if ( first == s ) {
+			first = d;
+			SET_PREV( link[ GET_NEXT(link[ s ]) ], d );
+			link[ d ] = link[ s ];
+			return;
+		}
+		if ( last == s ) {
+			last = d;
+			SET_NEXT( link[ GET_PREV(link[ s ])], d );
+			link[ d ] = link[ s ];
+			return;
+		}
+		final long links = link[ s ];
+		final int prev = GET_PREV(links);
+		final int next = GET_NEXT(links);
+		SET_NEXT( link[ prev ], d );
+		SET_PREV( link[ next ], d );
+		link[ d ] = links;
+	}
+
+
+	/** Returns the first element of this set in iteration order.
+	 *
+	 * @return the first element in iteration order.
+	 */
+	public KEY_GENERIC_TYPE FIRST() {
+		if ( size == 0 ) throw new NoSuchElementException();
+		return key[ first ];
+	}
+
+
+	/** Returns the last element of this set in iteration order.
+	 *
+	 * @return the last element in iteration order.
+	 */
+	public KEY_GENERIC_TYPE LAST() {
+		if ( size == 0 ) throw new NoSuchElementException();
+		return key[ last ];
+	}
+
+
+	public SORTED_SET KEY_GENERIC tailSet( KEY_GENERIC_TYPE from ) { throw new UnsupportedOperationException(); }
+	public SORTED_SET KEY_GENERIC headSet( KEY_GENERIC_TYPE to ) { throw new UnsupportedOperationException(); }
+	public SORTED_SET KEY_GENERIC subSet( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) { throw new UnsupportedOperationException(); }
+	
+	public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return null; }
+
+
+
+	/** A list iterator over a linked set.
+	 *
+	 * <P>This class provides a list iterator over a linked hash set. The empty constructor runs in 
+	 * constant time. The one-argument constructor needs to search for the given element, but it is 
+	 * optimized for the case of {@link java.util.SortedSet#last()}, in which case it runs in constant time, too.
+	 */
+	private class SetIterator extends KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC {
+		/** The entry that will be returned by the next call to {@link java.util.ListIterator#previous()} (or -1 if no previous entry exists). */
+		int prev = -1;
+		/** The entry that will be returned by the next call to {@link java.util.ListIterator#next()} (or -1 if no next entry exists). */
+		int next = -1;
+		/** The last entry that was returned (or -1 if we did not iterate or used {@link #remove()}). */
+		int curr = -1;
+		/** The current index (in the sense of a {@link java.util.ListIterator}). When -1, we do not know the current index.*/
+		int index = -1;
+
+		SetIterator() {
+			next = first;
+			index = 0;
+		}
+
+		SetIterator( KEY_GENERIC_TYPE from ) {
+			if ( KEY_EQUALS( key[ last ], from ) ) {
+				prev = last;
+				index = size;
+			}
+			else {
+				// The starting point.
+				int pos = KEY2INTHASH( from ) & mask;
+
+				// There's always an unused entry.
+				while( used[ pos ] ) {
+					if ( KEY_EQUALS( key[ pos ], from ) ) {
+						// Note: no valid index known.
+						next = GET_NEXT( link[ pos ] );
+						prev = pos;
+						return;
+					}
+					pos = ( pos + 1 ) & mask;
+				}
+				throw new NoSuchElementException( "The key " + from + " does not belong to this set." );
+			}
+		}
+					 
+		public boolean hasNext() { return next != -1; }
+		public boolean hasPrevious() { return prev != -1; }
+					 
+		public KEY_GENERIC_TYPE NEXT_KEY() {
+			if ( ! hasNext() ) throw new NoSuchElementException();
+
+			curr = next;
+			next = GET_NEXT(link[ curr ]);
+			prev = curr;
+
+			if ( index >= 0 ) index++;
+			if ( ASSERTS ) assert used[ curr ] : "Position " + curr + " is not used";
+			return key[ curr ];
+		}
+
+		public KEY_GENERIC_TYPE PREV_KEY() {
+			if ( ! hasPrevious() ) throw new NoSuchElementException();
+
+			curr = prev;
+			prev = GET_PREV(link[ curr ]);
+			next = curr;
+
+			if ( index >= 0 ) index--;
+
+			return key[ curr ];
+		}
+
+		private final void ensureIndexKnown() {
+			if ( index >= 0 ) return;
+			if ( prev == -1 ) {
+				index = 0;
+				return;
+			}
+			if ( next == -1 ) {
+				index = size;
+				return;
+			}
+			int pos = first;
+			index = 1;
+			while( pos != prev ) {
+				pos = GET_NEXT( link[ pos ] );
+				index++;
+			}
+		}
+
+		public int nextIndex() {
+			ensureIndexKnown();
+			return index;
+		}
+
+		public int previousIndex() {
+			ensureIndexKnown();
+			return index - 1;
+		}
+
+		
+		@SuppressWarnings("unchecked")
+		public void remove() {
+			ensureIndexKnown();
+			if ( curr == -1 ) throw new IllegalStateException();
+
+			if ( curr == prev ) {
+				/* If the last operation was a next(), we are removing an entry that precedes
+				   the current index, and thus we must decrement it. */
+				index--;
+				prev = GET_PREV(link[ curr ]);
+			}
+			else 
+				next = GET_NEXT(link[ curr ]);
+
+			size--;
+			/* Now we manually fix the pointers. Because of our knowledge of next
+			   and prev, this is going to be faster than calling fixPointers(). */
+			if ( prev == -1 ) first = next;
+			else 
+				SET_NEXT( link[ prev ], next );
+			if ( next == -1 ) last = prev;
+			else
+				SET_PREV( link[ next ], prev );
+
+			int last, slot, pos = curr;
+
+			// We have to horribly duplicate the shiftKeys() code because we need to update next/prev.			
+			for(;;) {
+				pos = ( ( last = pos ) + 1 ) & mask;
+				while( used[ pos ] ) {
+					slot = KEY2INTHASH( key[ pos ] ) & mask;
+					if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
+					pos = ( pos + 1 ) & mask;
+				}
+	
+				if ( ! used[ pos ] ) break;
+				key[ last ] = key[ pos ];			
+				if ( next == pos ) next = last;
+				if ( prev == pos ) prev = last;
+				fixPointers( pos, last );
+			}
+
+			used[ last ] = false;
+#if #keys(reference)
+			key[ last ] = null;
+#endif
+			curr = -1;
+		}
+	}
+
+
+	/** Returns a type-specific list iterator on the elements in this set, starting from a given element of the set.
+	 * Please see the class documentation for implementation details.
+	 *
+	 * @param from an element to start from.
+	 * @return a type-specific list iterator starting at the given element.
+	 * @throws IllegalArgumentException if <code>from</code> does not belong to the set.
+	 */
+	public KEY_LIST_ITERATOR KEY_GENERIC iterator( KEY_GENERIC_TYPE from ) {
+		return new SetIterator( from );
+	}
+
+	public KEY_LIST_ITERATOR KEY_GENERIC iterator() {
+		return new SetIterator();
+	}
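+
+	/* Illustrative use of the positioned iterator above, again with
+	   it.unimi.dsi.fastutil.longs.LongLinkedOpenHashSet standing in for the generated
+	   class; the starting element must already belong to the set.
+
+	   LongLinkedOpenHashSet s = new LongLinkedOpenHashSet( new long[] { 10, 20, 30, 40 } );
+	   LongListIterator i = s.iterator( 20 );  // positioned just after 20 in insertion order
+	   i.nextLong();                           // 30
+	   i.previousLong();                       // 30 again (usual ListIterator semantics)
+	*/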
+
+#else	 
+
+	/** An iterator over a hash set. */
+
+	private class SetIterator extends KEY_ABSTRACT_ITERATOR KEY_GENERIC {
+		/** The index of the next entry to be returned, if positive or zero. If negative, the next entry to be
+			returned, if any, is that of index -pos -2 from the {@link #wrapped} list. */
+		int pos = OPEN_HASH_SET.this.n;
+		/** The index of the last entry that has been returned (more precisely, the value of {@link #pos}). It is -1 if either
+			we did not return an entry yet, or the last returned entry has been removed. */
+		int last = -1;
+		/** A downward counter measuring how many entries must still be returned. */
+		int c = size;
+		/** A lazily allocated list containing elements that have wrapped around the table because of removals; such elements
+			would not be enumerated (other elements would be usually enumerated twice in their place). */
+		ARRAY_LIST KEY_GENERIC wrapped;
+		
+		{ 
+			final boolean used[] = OPEN_HASH_SET.this.used;
+			if ( c != 0 ) while( ! used[ --pos ] );
+		}
+
+		public boolean hasNext() {
+			return c != 0;
+		}
+
+		public KEY_GENERIC_TYPE NEXT_KEY() {
+			if ( ! hasNext() ) throw new NoSuchElementException();
+			
+			c--;
+			// We are just enumerating elements from the wrapped list.
+			if ( pos < 0 ) return wrapped.GET_KEY( - ( last = --pos ) - 2  );
+			
+			final KEY_GENERIC_TYPE retVal = key[ last = pos ];
+			
+			//System.err.println( "Count: " + c );
+			if ( c != 0 ) {
+				final boolean used[] = OPEN_HASH_SET.this.used;
+				while ( pos-- != 0 && !used[ pos ] );
+				// When here pos < 0 there are no more elements to be enumerated by scanning, but wrapped might be nonempty.
+			}
+			
+			return retVal;
+		}
+
+
+		/** Shifts left entries with the specified hash code, starting at the specified position,
+		 * and empties the resulting free entry. If any entry wraps around the table, instantiates
+		 * lazily {@link #wrapped} and stores the entry.
+		 *
+		 * @param pos a starting position.
+		 * @return the position cleared by the shifting process.
+		 */
+		final int shiftKeys( int pos ) {
+			// Shift entries with the same hash.
+			int last, slot;
+	
+			for(;;) {
+				pos = ( ( last = pos ) + 1 ) & mask;
+				
+				while( used[ pos ] ) {
+					slot = KEY2INTHASH( key[ pos ] ) & mask;
+					if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
+					pos = ( pos + 1 ) & mask;
+				}
+	
+				if ( ! used[ pos ] ) break;
+				if ( pos < last ) {
+					// Wrapped entry.
+					if ( wrapped == null ) wrapped = new ARRAY_LIST KEY_GENERIC();
+					wrapped.add( key[ pos ] );
+				}
+				key[ last ] = key[ pos ];			
+			}
+	
+			used[ last ] = false;
+#if #keys(reference)
+			key[ last ] = null;
+#endif
+			return last;
+		}
+	
+		@SuppressWarnings("unchecked")
+		public void remove() {
+			if ( last == -1 ) throw new IllegalStateException();
+			if ( pos < -1 ) {
+				// We're removing wrapped entries.
+#if #keys(reference)
+				OPEN_HASH_SET.this.remove( wrapped.set( - pos - 2, null ) );
+#else
+				OPEN_HASH_SET.this.remove( wrapped.GET_KEY( - pos - 2 ) );
+#endif
+				last = -1;
+				return;
+			}
+			size--;
+			if ( shiftKeys( last ) == pos && c > 0 ) {
+				c++;
+				NEXT_KEY();
+			}
+			last = -1; // You can no longer remove this entry.
+			if ( ASSERTS ) checkTable();
+		}
+	}
+
+	public KEY_ITERATOR KEY_GENERIC iterator() {
+		return new SetIterator();
+	}
+
+#endif
+
+
+
+	/** A no-op for backward compatibility. The kind of table implemented by
+	 * this class never needs rehashing.
+	 *
+	 * <P>If you need to reduce the table size to fit exactly
+	 * this set, use {@link #trim()}.
+	 *
+	 * @return true.
+	 * @see #trim()
+	 * @deprecated A no-op.
+	 */
+
+	@Deprecated
+	public boolean rehash() {
+		return true;
+	}
+
+
+	/** Rehashes this set, making the table as small as possible.
+	 * 
+	 * <P>This method rehashes the table to the smallest size satisfying the
+	 * load factor. It can be used when the set will not be changed anymore, so
+	 * as to optimize access speed and size.
+	 *
+	 * <P>If the table size is already the minimum possible, this method
+	 * does nothing.
+	 *
+	 * @return true if there was enough memory to trim the set.
+	 * @see #trim(int)
+	 */
+
+	public boolean trim() {
+		final int l = arraySize( size, f );
+		if ( l >= n ) return true;
+		try {
+			rehash( l );
+		}
+		catch(OutOfMemoryError cantDoIt) { return false; }
+		return true;
+	}
+
+
+	/** Rehashes this set if the table is too large.
+	 * 
+	 * <P>Let <var>N</var> be the smallest table size that can hold
+	 * <code>max(n,{@link #size()})</code> entries, still satisfying the load factor. If the current
+	 * table size is smaller than or equal to <var>N</var>, this method does
+	 * nothing. Otherwise, it rehashes this set in a table of size
+	 * <var>N</var>.
+	 *
+	 * <P>This method is useful when reusing sets.  {@linkplain #clear() Clearing a
+	 * set} leaves the table size untouched. If you are reusing a set
+	 * many times, you can call this method with a typical
+	 * size to avoid keeping around a very large table just
+	 * because of a few large transient sets.
+	 *
+	 * @param n the threshold for the trimming.
+	 * @return true if there was enough memory to trim the set.
+	 * @see #trim()
+	 */
+
+	public boolean trim( final int n ) {
+		final int l = HashCommon.nextPowerOfTwo( (int)Math.ceil( n / f ) );
+		if ( this.n <= l ) return true;
+		try {
+			rehash( l );
+		}
+		catch( OutOfMemoryError cantDoIt ) { return false; }
+		return true;
+	}
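+
+	/* Reuse sketch matching the javadoc above (LongOpenHashSet stands in for the
+	   generated class; `batches` and `process` are hypothetical):
+
+	   LongOpenHashSet buffer = new LongOpenHashSet();
+	   for ( long[] batch : batches ) {
+	       for ( long x : batch ) buffer.add( x );
+	       process( buffer );
+	       buffer.clear();       // keeps the current, possibly very large, table
+	       buffer.trim( 1024 );  // shrink back to a typical working size
+	   }
+	*/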
+
+	/** Resizes the set.
+	 *
+	 * <P>This method implements the basic rehashing strategy, and may be
+	 * overridden by subclasses implementing different rehashing strategies (e.g.,
+	 * disk-based rehashing). However, you should not override this method
+	 * unless you understand the internal workings of this class.
+	 *
+	 * @param newN the new size
+	 */
+
+	@SuppressWarnings("unchecked")
+	protected void rehash( final int newN ) {
+#ifdef Linked
+		int i = first, prev = -1, newPrev = -1, t, pos;
+#else
+		int i = 0, pos;
+		final boolean used[] = this.used;
+#endif
+
+		KEY_GENERIC_TYPE k;
+		final KEY_GENERIC_TYPE key[] = this.key;
+
+		final int newMask = newN - 1;
+		final KEY_GENERIC_TYPE newKey[] = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ newN ];
+		final boolean newUsed[] = new boolean[ newN ];
+#ifdef Linked
+		final long link[] = this.link;
+		final long newLink[] = new long[ newN ];
+		first = -1;
+#endif
+
+		for( int j = size; j-- != 0; ) {
+
+#ifndef Linked
+			while( ! used[ i ] ) i++;
+#endif
+
+			k = key[ i ];
+			pos = KEY2INTHASH( k ) & newMask;
+
+			while ( newUsed[ pos ] ) pos = ( pos + 1 ) & newMask;
+				
+			newUsed[ pos ] = true;
+			newKey[ pos ] = k;
+
+#ifdef Linked
+			if ( prev != -1 ) {
+				SET_NEXT( newLink[ newPrev ], pos );
+				SET_PREV( newLink[ pos ], newPrev );
+				newPrev = pos;
+			}
+			else {
+				newPrev = first = pos;
+				// Special case of SET(newLink[ pos ], -1, -1);
+				newLink[ pos ] = -1L;
+			}
+
+			t = i;
+			i = GET_NEXT(link[ i ]);
+			prev = t;
+
+#else
+			i++;
+#endif
+		}
+
+		n = newN;
+		mask = newMask;
+		maxFill = maxFill( n, f );
+		this.key = newKey;
+		this.used = newUsed;
+#ifdef Linked
+		this.link = newLink;
+		this.last = newPrev;
+		if ( newPrev != -1 )
+			// Special case of SET_NEXT( newLink[ newPrev ], -1 );
+			newLink[ newPrev ] |= -1 & 0xFFFFFFFFL;
+#endif
+	}
+
+
+	/** Returns a deep copy of this set. 
+	 *
+	 * <P>This method performs a deep copy of this hash set; the data stored in the
+	 * set, however, is not cloned. Note that this makes a difference only for object keys.
+	 *
+	 *  @return a deep copy of this set.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public OPEN_HASH_SET KEY_GENERIC clone() {
+		OPEN_HASH_SET KEY_GENERIC c;
+		try {
+			c = (OPEN_HASH_SET KEY_GENERIC)super.clone();
+		}
+		catch(CloneNotSupportedException cantHappen) {
+			throw new InternalError();
+		}
+		c.key = key.clone();
+		c.used = used.clone();
+#ifdef Linked
+		c.link = link.clone();
+#endif
+#ifdef Custom
+		c.strategy = strategy;
+#endif
+		return c;
+	}
+
+	/** Returns a hash code for this set.
+	 *
+	 * This method overrides the generic method provided by the superclass. 
+	 * Since <code>equals()</code> is not overridden, it is important
+	 * that the value returned by this method is the same value as
+	 * the one returned by the overridden method.
+	 *
+	 * @return a hash code for this set.
+	 */
+
+
+	public int hashCode() {
+		int h = 0, i = 0, j = size;
+		while( j-- != 0 ) {
+			while( ! used[ i ] ) i++;
+#if #keys(reference)
+			if ( this != key[ i ] )
+#endif
+				h += KEY2JAVAHASH( key[ i ] );
+			i++;
+		}
+		return h;
+	}
+
+
+	private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
+		final KEY_ITERATOR KEY_GENERIC i = iterator();
+		s.defaultWriteObject();
+		for( int j = size; j-- != 0; ) s.WRITE_KEY( i.NEXT_KEY() );
+	}
+
+
+	@SuppressWarnings("unchecked")
+	private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
+		s.defaultReadObject();
+
+		n = arraySize( size, f );
+		maxFill = maxFill( n, f );
+		mask = n - 1;
+		
+		final KEY_GENERIC_TYPE key[] = this.key = KEY_GENERIC_ARRAY_CAST new KEY_TYPE[ n ];
+		final boolean used[] = this.used = new boolean[ n ];
+#ifdef Linked
+		final long link[] = this.link = new long[ n ];
+		int prev = -1;
+		first = last = -1;
+#endif
+
+		KEY_GENERIC_TYPE k;
+
+		for( int i = size, pos = 0; i-- != 0; ) {
+
+			k = KEY_GENERIC_CAST s.READ_KEY();
+			pos = KEY2INTHASH( k ) & mask;
+
+			while ( used[ pos ] ) pos = ( pos + 1 ) & mask;
+
+			used[ pos ] = true;
+			key[ pos ] = k;
+
+#ifdef Linked
+			if ( first != -1 ) {
+				SET_NEXT( link[ prev ], pos );
+				SET_PREV( link[ pos ], prev );
+				prev = pos;
+			}
+			else {
+				prev = first = pos;
+				// Special case of SET_PREV( newLink[ pos ], -1 );
+				link[ pos ] |= (-1L & 0xFFFFFFFFL) << 32;
+			}
+#endif
+		}
+
+#ifdef Linked
+		last = prev;
+		if ( prev != -1 ) 
+			// Special case of SET_NEXT( link[ prev ], -1 );
+			link[ prev ] |= -1 & 0xFFFFFFFFL; 
+#endif
+
+		if ( ASSERTS ) checkTable();
+	}
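+
+	/* Round-trip sketch for the custom serialization above (LongOpenHashSet stands in
+	   for the generated class; exception handling omitted):
+
+	   LongOpenHashSet s = new LongOpenHashSet( new long[] { 1, 2, 3 } );
+	   java.io.ByteArrayOutputStream bos = new java.io.ByteArrayOutputStream();
+	   java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( bos );
+	   oos.writeObject( s );                                    // writeObject() above stores only the keys
+	   oos.close();
+	   java.io.ObjectInputStream ois = new java.io.ObjectInputStream(
+	       new java.io.ByteArrayInputStream( bos.toByteArray() ) );
+	   LongOpenHashSet t = (LongOpenHashSet)ois.readObject();   // the table is rebuilt by readObject() above
+	   assert s.equals( t );
+	*/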
+
+
+#ifdef ASSERTS_CODE
+	private void checkTable() {
+		final boolean[] used = this.used;
+		assert ( n & -n ) == n : "Table length is not a power of two: " + n;
+		assert n == key.length;
+		assert n == used.length;
+		int n = used.length;
+		while( n-- != 0 ) 
+			if ( used[ n ] && ! contains( key[ n ] ) ) 
+				throw new AssertionError( "Hash table has key " + key[ n ] + " marked as occupied, but the key does not belong to the table" );
+
+#if #keys(primitive)
+		java.util.HashSet<KEY_GENERIC_CLASS> s = new java.util.HashSet<KEY_GENERIC_CLASS> ();
+#else
+		java.util.HashSet<Object> s = new java.util.HashSet<Object>();
+#endif
+		
+		for( int i = used.length; i-- != 0; )
+			if ( used[ i ] && ! s.add( key[ i ] ) ) throw new AssertionError( "Key " + key[ i ] + " appears twice at position " + i );
+
+#ifdef Linked
+		KEY_LIST_ITERATOR KEY_GENERIC i = iterator();
+		KEY_GENERIC_TYPE k;
+		n = size();
+		while( n-- != 0 ) 
+			if ( ! contains( k = i.NEXT_KEY() ) ) 
+				throw new AssertionError( "Linked hash table forward enumerates key " + k + ", but the key does not belong to the table" );
+
+		if ( i.hasNext() ) throw new AssertionError( "Forward iterator not exhausted" );
+
+		n = size();
+		if ( n > 0 ) {
+			i = iterator( LAST() );
+			while( n-- != 0 ) 
+				if ( ! contains( k = i.PREV_KEY() ) ) 
+					throw new AssertionError( "Linked hash table backward enumerates key " + k + ", but the key does not belong to the table" );
+			
+			if ( i.hasPrevious() ) throw new AssertionError( "Previous iterator not exhausted" );
+		}
+#endif
+	}
+#else
+	private void checkTable() {}
+#endif
+
+#ifdef TEST
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif #keyclass(Object)
+#ifdef Custom
+		int i = r.nextInt( 3 );
+		byte a[] = new byte[ i ];
+		while( i-- != 0 ) a[ i ] = (byte)r.nextInt();
+		return a;
+#else
+		return Integer.toBinaryString( r.nextInt() );
+#endif
+#else
+		return new java.io.Serializable() {};
+#endif
+	}
+
+
+	private static final class ArrayComparator implements java.util.Comparator {
+		public int compare( Object a, Object b ) {
+			byte[] aa = (byte[])a;
+			byte[] bb = (byte[])b;
+			int length = Math.min( aa.length, bb.length );
+			for( int i = 0; i < length; i++ ) {
+				if ( aa[ i ] < bb[ i ] ) return -1;
+				if ( aa[ i ] > bb[ i ] ) return 1;
+			}
+			return aa.length == bb.length ? 0 : ( aa.length < bb.length ? -1 : 1 );
+		}
+	}
+
+	private static final class MockSet extends java.util.TreeSet {
+		private java.util.List list = new java.util.ArrayList();
+
+		public MockSet( java.util.Comparator c ) { super( c ); }
+
+		public boolean add( Object k ) {
+			if ( ! contains( k ) ) list.add( k );
+			return super.add( k );
+		}
+
+		public boolean addAll( Collection c ) {
+			java.util.Iterator i = c.iterator();
+			boolean result = false;
+			while( i.hasNext() ) result |= add( i.next() );
+			return result;
+		}
+
+		public boolean removeAll( Collection c ) {
+			java.util.Iterator i = c.iterator();
+			boolean result = false;
+			while( i.hasNext() ) result |= remove( i.next() );
+			return result;
+		}
+
+		public boolean remove( Object k ) {
+			if ( contains( k ) ) {
+				int i = list.size();
+				while( i-- != 0 ) if ( comparator().compare( list.get( i ), k ) == 0 ) {
+					list.remove( i );
+					break;
+				}
+			}
+			return super.remove( k );
+		}
+
+		private void justRemove( Object k ) { super.remove( k ); }
+
+		public java.util.Iterator iterator() {
+			return new java.util.Iterator() {
+					final java.util.Iterator iterator = list.iterator();
+					Object curr;
+					public Object next() { return curr = iterator.next(); }
+					public boolean hasNext() { return iterator.hasNext(); }
+					public void remove() { 
+						justRemove( curr );
+						iterator.remove(); 
+					}
+				};
+		}
+	}
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition fp = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, fp ).toString();
+	}
+
+	private static void speedTest( int n, float f, boolean comp ) {
+#ifndef Custom
+		int i, j;
+		OPEN_HASH_SET m;
+#ifdef Linked
+		java.util.LinkedHashSet t;
+#else
+		java.util.HashSet t;
+#endif
+
+		KEY_TYPE k[] = new KEY_TYPE[n];
+		KEY_TYPE nk[] = new KEY_TYPE[n];
+		long ms;
+
+		for( i = 0; i < n; i++ ) {
+			k[i] = genKey();
+			nk[i] = genKey();
+		}
+		  
+		double totAdd = 0, totYes = 0, totNo = 0, totIter = 0, totRemYes = 0, totRemNo = 0, d;
+
+		if ( comp ) { for( j = 0; j < 20; j++ ) {
+
+#ifdef Linked
+			t = new java.util.LinkedHashSet( 16 );
+#else
+			t = new java.util.HashSet( 16 );
+#endif
+
+			/* We add elements to t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.add( KEY2OBJ( k[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totAdd += d; 				
+			System.out.print("Add: " + format( d ) +" K/s " );
+
+			/* We check for elements in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.contains( KEY2OBJ( k[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totYes += d; 				
+			System.out.print("Yes: " + format( d ) +" K/s " );
+
+			/* We check for elements not in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.contains( KEY2OBJ( nk[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totNo += d; 				
+			System.out.print("No: " + format( d ) +" K/s " );
+
+			/* We iterate on t. */
+			ms = System.currentTimeMillis();
+			for( java.util.Iterator it = t.iterator(); it.hasNext(); it.next() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIter += d; 				
+			System.out.print("Iter: " + format( d ) +" K/s " );
+				
+			// Too expensive in the linked case			
+#ifndef Linked
+			/* We delete elements not in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.remove( KEY2OBJ( nk[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemNo += d; 				
+			System.out.print("RemNo: " + format( d ) +" K/s " );
+	
+			/* We delete elements in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.remove( KEY2OBJ( k[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemYes += d; 				
+			System.out.print("RemYes: " + format( d ) +" K/s " );
+#endif
+				
+			System.out.println();
+		}
+
+		System.out.println();
+		System.out.println( "java.util Add: " + format( totAdd/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s Iter: " + format( totIter/(j-3) ) + " K/s RemNo: " + format( totRemNo/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + "K/s" );
+
+		System.out.println();
+
+		totAdd = totYes = totNo = totIter = totRemYes = totRemNo = 0;
+		}
+
+		for( j = 0; j < 20; j++ ) {
+
+			m = new OPEN_HASH_SET( 16, f );
+
+			/* We add elements to m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.add( k[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totAdd += d; 				
+			System.out.print("Add: " + format( d ) +" K/s " );
+
+			/* We check for elements in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.contains( k[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totYes += d; 				
+			System.out.print("Yes: " + format( d ) +" K/s " );
+
+			/* We check for elements not in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.contains( nk[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totNo += d; 				
+			System.out.print("No: " + format( d ) +" K/s " );
+
+			/* We iterate on m. */
+			ms = System.currentTimeMillis();
+			for( KEY_ITERATOR it = (KEY_ITERATOR)m.iterator(); it.hasNext(); it.NEXT_KEY() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIter += d; 	 
+			System.out.print("Iter: " + format( d ) +" K/s " );
+
+			// Too expensive in the linked case			
+#ifndef Linked
+			/* We delete elements not in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.remove( nk[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemNo += d; 	
+			System.out.print("RemNo: " + format( d ) +" K/s " );
+
+			/* We delete elements in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.remove( k[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totRemYes += d; 				
+			System.out.print("RemYes: " + format( d ) +" K/s " );	 
+#endif
+
+			System.out.println();
+		}
+
+
+		System.out.println();
+		System.out.println( "fastutil  Add: " + format( totAdd/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s Iter: " + format( totIter/(j-3) ) + " K/s RemNo: " + format( totRemNo/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s" );
+
+		System.out.println();
+#endif
+	}
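(Annotation, not part of the patch: each throughput figure printed above is d = n / elapsed milliseconds, that is, operations per millisecond, which equals thousands of operations per second, hence the "K/s" label. For example, adding n = 1,000,000 keys in 250 ms gives d = 1,000,000 / 250 = 4,000, printed as 4,000.00 K/s, i.e., four million operations per second.)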
+
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+
+	private static void printProbes( OPEN_HASH_SET m ) {
+		long totProbes = 0;
+		double totSquareProbes = 0;
+		int maxProbes = 0;	
+		final double f = (double)m.size / m.n;
+		for( int i = 0, c = 0; i < m.n; i++ ) {
+			if ( m.used[ i ] ) c++;
+			else {
+				if ( c != 0 ) {
+					final long p = ( c + 1 ) * ( c + 2 ) / 2;
+					totProbes += p;
+					totSquareProbes += (double)p * p;
+				}
+				maxProbes = Math.max( c, maxProbes );
+				c = 0;
+				totProbes++;
+				totSquareProbes++;
+			}
+		}
+
+		final double expected = (double)totProbes / m.n;
+		System.err.println( "Expected probes: " + ( 
+			3 * Math.sqrt( 3 ) * ( f / ( ( 1 - f ) * ( 1 - f ) ) ) + 4 / ( 9 * f ) - 1
+		) + "; actual: " + expected + "; stddev: " + Math.sqrt( totSquareProbes / m.n - expected * expected )  + "; max probes: " + maxProbes );
+	}
+	
+	
+	private static void test( int n, float f ) {
+#if !defined(Custom) || #keys(reference)
+
+		int c;
+#ifdef Custom
+		OPEN_HASH_SET m = new OPEN_HASH_SET(Hash.DEFAULT_INITIAL_SIZE, f, it.unimi.dsi.fastutil.bytes.ByteArrays.HASH_STRATEGY);
+#else
+		OPEN_HASH_SET m = new OPEN_HASH_SET(Hash.DEFAULT_INITIAL_SIZE, f);
+#endif
+#ifdef Linked
+#ifdef Custom
+		java.util.Set t = new MockSet(new ArrayComparator());
+#else
+		java.util.Set t = new java.util.LinkedHashSet();
+#endif
+#else
+#ifdef Custom
+		java.util.Set t = new java.util.TreeSet(new ArrayComparator());
+#else 
+		java.util.Set t = new java.util.HashSet();
+#endif
+#endif
+
+		/* First of all, we fill t with random data. */
+
+		for(int i=0; i<f * n;  i++ ) t.add(KEY2OBJ(genKey()));
+		  
+		/* Now we add to m the same data */
+		  
+		m.addAll(t); 
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after insertion");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after insertion");
+		printProbes( m );
+
+		/* Now we check that m actually holds that data. */
+		  
+		for(java.util.Iterator i=t.iterator(); i.hasNext();  ) {
+			Object e = i.next();
+			if (!m.contains(e)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+e+") after insertion (iterating on t)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+
+		c = 0;		  
+		for(java.util.Iterator i=m.iterator(); i.hasNext();  ) {
+			Object e = i.next();
+			c++;
+			if (!t.contains(e)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+e+") after insertion (iterating on m)");
+				System.exit( 1 );
+			}
+		}
+
+		if ( c != t.size() ) {
+			System.out.println("Error (" + seed + "): m has only " + c + " keys instead of " + t.size() + " after insertion (iterating on m)");
+			System.exit( 1 );
+		}
+		/* Now we check that inquiries about random data give the same answer in m and t. For
+		   m we use the polymorphic method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+			if (m.contains(T) != t.contains(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence in keys between t and m (polymorphic method)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Again, we check that inquiries about random data give the same answer in m and t, but
+		   for m we use the standard method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+			if (m.contains(KEY2OBJ(T)) != t.contains(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence between t and m (standard method)");
+				System.exit( 1 );
+			}
+		}
+
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+			if (m.add(KEY2OBJ(T)) != t.add(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence in add() between t and m");
+				System.exit( 1 );
+			}
+			T = genKey();
+			if (m.remove(KEY2OBJ(T)) != t.remove(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence in remove() between t and m");
+				System.exit( 1 );
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after removal");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after removal");
+
+		/* Now we check that m actually holds that data. */
+		  
+		for(java.util.Iterator i=t.iterator(); i.hasNext();  ) {
+			Object e = i.next();
+			if (!m.contains(e)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+e+") after removal (iterating on t)");
+				System.exit( 1 );
+			}
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.iterator(); i.hasNext();  ) {
+			Object e = i.next();
+			if (!t.contains(e)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+e+") after removal (iterating on m)");
+				System.exit( 1 );
+			}
+		}
+
+		printProbes( m );
+
+		/* Now we turn m into an array, build a set from it again, and check that it is OK. */
+		KEY_TYPE a[] = m.TO_KEY_ARRAY();
+		  
+#ifdef Custom
+		if (!new OPEN_HASH_SET(a, m.strategy()).equals(m))
+			System.out.println("Error (" + seed + "): toArray() output (or array-based constructor) is not OK");
+#else
+		if (!new OPEN_HASH_SET(a).equals(m))
+			System.out.println("Error (" + seed + "): toArray() output (or array-based constructor) is not OK");
+#endif
+
+		/* Now we check cloning. */
+
+		ensure( m.equals( ((OPEN_HASH_SET)m).clone() ), "Error (" + seed + "): m does not equal m.clone()" );
+		ensure( ((OPEN_HASH_SET)m).clone().equals( m ), "Error (" + seed + "): m.clone() does not equal m" );
+
+		int h = m.hashCode();
+
+		/* Now we save and read m. */
+
+		try {
+			java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test");
+			java.io.OutputStream os = new java.io.FileOutputStream(ff);
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os);
+				
+			oos.writeObject(m);
+			oos.close();
+				
+			java.io.InputStream is = new java.io.FileInputStream(ff);
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is);
+				
+			m = (OPEN_HASH_SET)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch(Exception e) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+
+#if !#keyclass(Reference)
+		if (m.hashCode() != h) System.out.println("Error (" + seed + "): hashCode() changed after save/read");
+
+		printProbes( m );
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.iterator(); i.hasNext();  ) {
+			Object e = i.next();
+			if (!t.contains(e)) {
+				System.out.println("Error (" + seed + "): m and t differ on a key ("+e+") after save/read");
+				System.exit( 1 );
+			}
+		}
+#else
+		m.clear();
+		m.addAll( t );
+#endif
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+			if (m.add(KEY2OBJ(T)) != t.add(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence in add() between t and m after save/read");
+				System.exit( 1 );
+			}
+			T = genKey();
+			if (m.remove(KEY2OBJ(T)) != t.remove(KEY2OBJ(T))) {
+				System.out.println("Error (" + seed + "): divergence in remove() between t and m after save/read");
+				System.exit( 1 );
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after post-save/read removal");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after post-save/read removal");
+
+
+#ifdef Linked
+
+				 
+		/* Now we play with iterators, but only in the linked case. */
+
+		{
+			java.util.ListIterator i, j;
+			Object I, J;
+			i = (java.util.ListIterator)m.iterator(); 
+			j = new java.util.LinkedList( t ).listIterator(); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + seed + "): divergence in hasNext()" );
+				ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + seed + "): divergence in hasPrevious()" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+#ifdef Custom
+					ensure( m.strategy().equals( i.next(), J = j.next() ), "Error (" + seed + "): divergence in next()" );
+#else
+					ensure( i.next().equals( J = j.next() ), "Error (" + seed + "): divergence in next()" );
+#endif
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+#ifdef Custom
+					ensure( m.strategy().equals( i.previous(), J = j.previous() ), "Error (" + seed + "): divergence in previous()" );
+#else
+					ensure( i.previous().equals( J = j.previous() ), "Error (" + seed + "): divergence in previous()" );
+#endif
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+
+				ensure( i.nextIndex() == j.nextIndex(), "Error (" + seed + "): divergence in nextIndex()" );
+				ensure( i.previousIndex() == j.previousIndex(), "Error (" + seed + "): divergence in previousIndex()" );
+
+			}
+
+		}
+
+		if ( t.size() > 0 ) {
+			java.util.ListIterator i, j;
+			Object J;
+			j = new java.util.LinkedList( t ).listIterator(); 
+			int e = r.nextInt( t.size() );
+			Object from;
+			do from = j.next(); while( e-- != 0 );
+
+			i = (java.util.ListIterator)m.iterator( KEY_OBJ2TYPE( from ) ); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" );
+				ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+#ifdef Custom
+					ensure( m.strategy().equals( i.next(), J = j.next() ), "Error (" + seed + "): divergence in next() (iterator with starting point " + from + ")" );
+#else
+					ensure( i.next().equals( J = j.next() ), "Error (" + seed + "): divergence in next() (iterator with starting point " + from + ")" );
+#endif
+
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+#ifdef Custom
+					ensure( m.strategy().equals( i.previous(), J = j.previous() ), "Error (" + seed + "): divergence in previous() (iterator with starting point " + from + ")" );
+#else
+					ensure( i.previous().equals( J = j.previous() ), "Error (" + seed + "): divergence in previous() (iterator with starting point " + from + ")" );
+#endif
+
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+
+				ensure( i.nextIndex() == j.nextIndex(), "Error (" + seed + "): divergence in nextIndex() (iterator with starting point " + from + ")" );
+				ensure( i.previousIndex() == j.previousIndex(), "Error (" + seed + "): divergence in previousIndex() (iterator with starting point " + from + ")" );
+
+			}
+
+		}
+
+		/* Now we check that m actually holds that data. */
+		  
+		ensure( m.equals(t), "Error (" + seed + "): ! m.equals( t ) after iteration" );
+		ensure( t.equals(m), "Error (" + seed + "): ! t.equals( m ) after iteration" );
+
+
+
+#endif
+
+		/* Now we take out of m everything, and check that it is empty. */
+
+		for(java.util.Iterator i=m.iterator(); i.hasNext(); ) { i.next(); i.remove();} 
+
+		if (!m.isEmpty())  {
+			System.out.println("Error (" + seed + "): m is not empty (as it should be)");
+			System.exit( 1 );
+		}
+
+#if #keyclass(Integer) || #keyclass(Long)
+		m = new OPEN_HASH_SET(n, f);
+		t.clear();
+		int x;
+
+		/* Now we torture-test the hash table. This part is implemented only for integers and longs. */
+
+		int p = m.used.length;
+
+		for(int i=0; i<p; i++) {
+			for (int j=0; j<20; j++) {
+				m.add(i+(r.nextInt() % 10)*p);
+				m.remove(i+(r.nextInt() % 10)*p);
+			}
+
+			for (int j=-10; j<10; j++) m.remove(i+j*p);
+		}
+		  
+		t.addAll(m);
+
+		/* Now all table entries are REMOVED. */
+ 
+		int k = 0;
+		for(int i=0; i<(p*f)/10; i++) {
+			for (int j=0; j<10; j++) {
+				k++;
+				x = i+(r.nextInt() % 10)*p;
+				if (m.add(x) != t.add(KEY2OBJ(x)))
+					System.out.println("Error (" + seed + "): m and t differ on a key during torture-test insertion.");
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after torture-test insertion");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after torture-test insertion");
+
+		for(int i=0; i<(p*f)/10; i++) {
+			for (int j=0; j<10; j++) {
+				x = i+(r.nextInt() % 10)*p;
+				if (m.remove(x) != t.remove(KEY2OBJ(x)))
+					System.out.println("Error (" + seed + "): m and t differ on a key during torture-test removal.");
+			}
+		}
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after torture-test removal");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after torture-test removal");
+
+		if (!m.equals(m.clone())) System.out.println("Error (" + seed + "): !m.equals(m.clone()) after torture-test removal");
+		if (!((OPEN_HASH_SET)m.clone()).equals(m)) System.out.println("Error (" + seed + "): !m.clone().equals(m) after torture-test removal");
+
+		m.rehash();
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after rehash()");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after rehash()");
+
+		m.trim();
+
+		if (!m.equals(t)) System.out.println("Error (" + seed + "): !m.equals(t) after trim()");
+		if (!t.equals(m)) System.out.println("Error (" + seed + "): !t.equals(m) after trim()");
+#endif
+
+		System.out.println("Test OK");
+		return;
+#endif
+	}
+
+
+	public static void main( String args[] ) {
+		float f = Hash.DEFAULT_LOAD_FACTOR;
+		int n  = Integer.parseInt(args[1]);
+		if (args.length>2) f = Float.parseFloat(args[2]);
+		if ( args.length > 3 ) r = new java.util.Random( seed = Long.parseLong( args[ 3 ] ) );
+		  
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, f, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n, f);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+
+#endif
+
+}
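(Annotation, not part of the patch: the main() above expects the action name, the number of keys and, optionally, the load factor and the random seed. Assuming the sources were generated with TEST defined and compiled, a hypothetical invocation of the int instantiation could look like

    java it.unimi.dsi.fastutil.ints.IntOpenHashSet test 100000 0.75 42

where "test" runs the correctness test on 100000 keys with load factor 0.75 and seed 42, while "speedTest" and "speedComp" run the benchmark in speedTest() instead.)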
diff --git a/drv/PriorityQueue.drv b/drv/PriorityQueue.drv
new file mode 100644
index 0000000..cac1c40
--- /dev/null
+++ b/drv/PriorityQueue.drv
@@ -0,0 +1,70 @@
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.NoSuchElementException;
+
+import it.unimi.dsi.fastutil.PriorityQueue;
+
+/** A type-specific {@link PriorityQueue}; provides some additional methods that use polymorphism to avoid (un)boxing. 
+ *
+ * <P>Additionally, this interface strengthens {@link #comparator()}.
+ */
+
+public interface PRIORITY_QUEUE extends PriorityQueue<KEY_CLASS> {
+
+	/** Enqueues a new element.
+	 *
+	 * @param x the element to enqueue.
+	 */
+
+	void enqueue( KEY_GENERIC_TYPE x );
+
+	/** Dequeues the {@linkplain #first() first} element from the queue.
+	 *
+	 * @return the dequeued element.
+	 * @throws NoSuchElementException if the queue is empty.
+	 */
+
+	KEY_GENERIC_TYPE DEQUEUE();
+
+	/** Returns the first element of the queue.
+	 *
+	 * @return the first element.
+	 * @throws NoSuchElementException if the queue is empty.
+	 */
+
+	KEY_GENERIC_TYPE FIRST();
+
+	/** Returns the last element of the queue, that is, the element that would be dequeued last (optional operation).
+	 *
+	 * @return the last element.
+	 * @throws NoSuchElementException if the queue is empty.
+	 */
+
+	KEY_GENERIC_TYPE LAST();
+
+	/** Returns the comparator associated with this priority queue, or <code>null</code> if it uses its elements' natural ordering.
+	 *
+	 * <P>Note that this specification strengthens the one given in {@link PriorityQueue#comparator()}.
+	 *
+	 * @see PriorityQueue#comparator()
+	 */
+
+	KEY_COMPARATOR comparator();
+}
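(Annotation, not part of the patch: a minimal usage sketch of the type-specific methods above, using the int instantiation of this interface and the heap implementation shipped by fastutil; the class name and numbers are arbitrary.)

    import it.unimi.dsi.fastutil.ints.IntHeapPriorityQueue;
    import it.unimi.dsi.fastutil.ints.IntPriorityQueue;

    public class IntPriorityQueueDemo {
        public static void main( String[] args ) {
            // enqueue( int ) and dequeueInt() work on primitives, so no (un)boxing occurs.
            IntPriorityQueue q = new IntHeapPriorityQueue();
            q.enqueue( 3 );
            q.enqueue( 1 );
            q.enqueue( 2 );
            System.out.println( q.firstInt() );                          // 1 (natural ordering, smallest first)
            while( ! q.isEmpty() ) System.out.println( q.dequeueInt() ); // 1, 2, 3
        }
    }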
diff --git a/drv/PriorityQueues.drv b/drv/PriorityQueues.drv
new file mode 100644
index 0000000..99dd7b0
--- /dev/null
+++ b/drv/PriorityQueues.drv
@@ -0,0 +1,81 @@
+/*		 
+ * Copyright (C) 2003-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+/** A class providing static methods and objects that do useful things with type-specific priority queues.
+ *
+ * @see it.unimi.dsi.fastutil.PriorityQueue
+ */
+
+public class PRIORITY_QUEUES {
+
+	private PRIORITY_QUEUES() {}
+
+	/** A synchronized wrapper class for priority queues. */
+
+	public static class SynchronizedPriorityQueue KEY_GENERIC implements PRIORITY_QUEUE KEY_GENERIC {
+		final protected PRIORITY_QUEUE KEY_GENERIC q;
+		final protected Object sync;
+
+		protected SynchronizedPriorityQueue( final PRIORITY_QUEUE KEY_GENERIC q, final Object sync ) {
+			this.q = q;
+			this.sync = sync;
+		}
+
+		protected SynchronizedPriorityQueue( final PRIORITY_QUEUE KEY_GENERIC q ) {
+			this.q = q;
+			this.sync = this;
+		}
+
+		public void enqueue( KEY_GENERIC_TYPE x ) { synchronized( sync ) { q.enqueue( x ); } }
+		public KEY_GENERIC_TYPE DEQUEUE() { synchronized( sync ) { return q.DEQUEUE(); } }
+		public KEY_GENERIC_TYPE FIRST() { synchronized( sync ) { return q.FIRST(); } }
+		public KEY_GENERIC_TYPE LAST() { synchronized( sync ) { return q.LAST(); } }
+		public boolean isEmpty() { synchronized( sync ) { return q.isEmpty(); } }
+		public int size() { synchronized( sync ) { return q.size(); } }
+		public void clear() { synchronized( sync ) { q.clear(); } }
+		public void changed() { synchronized( sync ) { q.changed(); } }
+		public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { synchronized( sync ) { return q.comparator(); } }
+#if !#keyclass(Object)
+		public void enqueue( KEY_CLASS x ) { synchronized( sync ) { q.enqueue( x ); } }
+		public KEY_CLASS dequeue() { synchronized( sync ) { return q.dequeue(); } }
+		public KEY_CLASS first() { synchronized( sync ) { return q.first(); } }
+		public KEY_CLASS last() { synchronized( sync ) { return q.last(); } }
+
+#endif
+	}
+
+
+	/** Returns a synchronized type-specific priority queue backed by the specified type-specific priority queue.
+	 *
+	 * @param q the priority queue to be wrapped in a synchronized priority queue.
+	 * @return a synchronized view of the specified priority queue.
+	 */
+	public static KEY_GENERIC PRIORITY_QUEUE KEY_GENERIC synchronize( final PRIORITY_QUEUE KEY_GENERIC q ) {	return new SynchronizedPriorityQueue( q ); }
+
+	/** Returns a synchronized type-specific priority queue backed by the specified type-specific priority queue, using an assigned object to synchronize.
+	 *
+	 * @param q the priority queue to be wrapped in a synchronized priority queue.
+	 * @param sync an object that will be used to synchronize the access to the priority queue.
+	 * @return a synchronized view of the specified priority queue.
+	 */
+
+	public static KEY_GENERIC PRIORITY_QUEUE KEY_GENERIC synchronize( final PRIORITY_QUEUE KEY_GENERIC q, final Object sync ) { return new SynchronizedPriorityQueue( q, sync ); }
+
+
+}
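(Annotation, not part of the patch: a brief sketch of how the synchronized wrapper above is obtained, again in the int instantiation; the lock defaults to the wrapper itself, or can be supplied with the two-argument synchronize().)

    import it.unimi.dsi.fastutil.ints.IntHeapPriorityQueue;
    import it.unimi.dsi.fastutil.ints.IntPriorityQueue;
    import it.unimi.dsi.fastutil.ints.IntPriorityQueues;

    public class SynchronizedQueueDemo {
        public static void main( String[] args ) {
            // Every call on the returned view synchronizes on the wrapper itself.
            IntPriorityQueue q = IntPriorityQueues.synchronize( new IntHeapPriorityQueue() );
            q.enqueue( 42 );
            System.out.println( q.dequeueInt() ); // 42
        }
    }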
diff --git a/drv/RBTreeMap.drv b/drv/RBTreeMap.drv
new file mode 100644
index 0000000..55665b2
--- /dev/null
+++ b/drv/RBTreeMap.drv
@@ -0,0 +1,2687 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.objects.AbstractObjectSortedSet;
+import it.unimi.dsi.fastutil.objects.ObjectBidirectionalIterator;
+import it.unimi.dsi.fastutil.objects.ObjectListIterator;
+import it.unimi.dsi.fastutil.objects.ObjectSortedSet;
+
+import VALUE_PACKAGE.VALUE_COLLECTION;
+import VALUE_PACKAGE.VALUE_ABSTRACT_COLLECTION;
+import VALUE_PACKAGE.VALUE_ITERATOR;
+
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.NoSuchElementException;
+
+#if #values(primitive)
+import VALUE_PACKAGE.VALUE_LIST_ITERATOR;
+#endif
+
+/** A type-specific red-black tree map with a fast, small-footprint implementation.
+ *
+ * <P>The iterators provided by the views of this class are type-specific {@linkplain
+ * it.unimi.dsi.fastutil.BidirectionalIterator bidirectional iterators}.
+ * Moreover, the iterator returned by <code>iterator()</code> can be safely cast
+ * to a type-specific {@linkplain java.util.ListIterator list iterator}.
+ *
+ */
+
+public class RB_TREE_MAP KEY_VALUE_GENERIC extends ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable {
+
+	/** A reference to the root entry. */
+	protected transient Entry KEY_VALUE_GENERIC tree;
+
+	/** Number of entries in this map. */
+	protected int count;
+
+	/** The first key in this map. */
+	protected transient Entry KEY_VALUE_GENERIC firstEntry;
+
+	/** The last key in this map. */
+	protected transient Entry KEY_VALUE_GENERIC lastEntry;
+
+	/** Cached set of entries. */
+	protected transient volatile ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> entries;
+
+	/** Cached set of keys. */
+	protected transient volatile SORTED_SET KEY_GENERIC keys;
+
+	/** Cached collection of values. */
+	protected transient volatile VALUE_COLLECTION VALUE_GENERIC values;
+
+	/** After a <code>put()</code> or a <code>remove()</code>, this variable
+	 * remembers whether the <em>domain</em> of the map
+	 * has been modified. */
+	protected transient boolean modified;
+
+	/** This map's comparator, as provided in the constructor. */
+	protected Comparator<? super KEY_GENERIC_CLASS> storedComparator;
+
+	/** This map's actual comparator; it may differ from {@link #storedComparator} because it is
+		always a type-specific comparator, so it could be derived from the former by wrapping. */
+	protected transient KEY_COMPARATOR KEY_SUPER_GENERIC actualComparator;
+
+	private static final long serialVersionUID = -7046029254386353129L;
+
+	private static final boolean ASSERTS = ASSERTS_VALUE;
+
+	{
+		allocatePaths();
+	}
+
+	/** Creates a new empty tree map. 
+	 */
+
+	public RB_TREE_MAP() {
+		tree = null;
+		count = 0;
+	}
+
+	/** Generates the comparator that will actually be used.
+	 *
+	 * <P>When a specific {@link Comparator} is specified and stored in {@link
+	 * #storedComparator}, we must check whether it is type-specific. If so,
+	 * we can use it directly, and we store it in {@link #actualComparator}. Otherwise,
+	 * we generate on the fly an anonymous class that wraps the non-specific {@link Comparator}
+	 * and makes it into a type-specific one.
+	 */
+	@SuppressWarnings("unchecked")
+	private void setActualComparator() {
+#if #keyclass(Object)
+		actualComparator = storedComparator;
+#else
+		/* If the provided comparator is already type-specific, we use it. Otherwise,
+		   we use a wrapper anonymous class to fake that it is type-specific. */
+		if ( storedComparator == null || storedComparator instanceof KEY_COMPARATOR ) actualComparator = (KEY_COMPARATOR)storedComparator;
+		else actualComparator =	new KEY_COMPARATOR KEY_SUPER_GENERIC() {
+				public int compare( KEY_GENERIC_TYPE k1, KEY_GENERIC_TYPE k2 ) {
+					return storedComparator.compare( KEY2OBJ( k1 ), KEY2OBJ( k2 ) );
+				}
+				public int compare( KEY_GENERIC_CLASS ok1, KEY_GENERIC_CLASS ok2 ) {
+					return storedComparator.compare( ok1, ok2 );
+				}
+			};
+#endif
+	}
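(Annotation, not part of the patch: in the int instantiation, the wrapping performed above amounts to adapting a plain Comparator<Integer> into fastutil's IntComparator. A hand-written equivalent, for illustration only:)

    import it.unimi.dsi.fastutil.ints.IntComparator;
    import java.util.Comparator;

    public class ComparatorWrapDemo {
        public static void main( String[] args ) {
            final Comparator<Integer> boxed = java.util.Collections.reverseOrder();
            // The anonymous class forwards primitive comparisons to the boxed comparator,
            // mirroring what setActualComparator() generates for non-type-specific comparators.
            IntComparator wrapped = new IntComparator() {
                public int compare( int k1, int k2 ) { return boxed.compare( Integer.valueOf( k1 ), Integer.valueOf( k2 ) ); }
                public int compare( Integer ok1, Integer ok2 ) { return boxed.compare( ok1, ok2 ); }
            };
            System.out.println( wrapped.compare( 1, 2 ) ); // positive, since the order is reversed
        }
    }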
+	 
+
+	/** Creates a new empty tree map with the given comparator.
+	 *
+	 * @param c a (possibly type-specific) comparator.
+	 */
+
+	public RB_TREE_MAP( final Comparator<? super KEY_GENERIC_CLASS> c ) {
+		this();
+		storedComparator = c;
+		setActualComparator();
+	}
+
+
+	/** Creates a new tree map copying a given map.
+	 *
+	 * @param m a {@link Map} to be copied into the new tree map. 
+	 */
+	 
+	public RB_TREE_MAP( final Map<? extends KEY_GENERIC_CLASS, ? extends VALUE_GENERIC_CLASS> m ) {
+		this();
+		putAll( m );
+	}
+
+	/** Creates a new tree map copying a given sorted map (and its {@link Comparator}).
+	 *
+	 * @param m a {@link SortedMap} to be copied into the new tree map. 
+	 */
+	 
+	public RB_TREE_MAP( final SortedMap<KEY_GENERIC_CLASS,VALUE_GENERIC_CLASS> m ) {
+		this( m.comparator() );
+		putAll( m );
+	}
+
+	/** Creates a new tree map copying a given map.
+	 *
+	 * @param m a type-specific map to be copied into the new tree map. 
+	 */
+	 
+	public RB_TREE_MAP( final MAP KEY_VALUE_EXTENDS_GENERIC m ) {
+		this();
+		putAll( m );
+	}
+
+	/** Creates a new tree map copying a given sorted map (and its {@link Comparator}).
+	 *
+	 * @param m a type-specific sorted map to be copied into the new tree map. 
+	 */
+	 
+	public RB_TREE_MAP( final SORTED_MAP KEY_VALUE_GENERIC m ) {
+		this( m.comparator() );
+		putAll( m );
+	}
+
+	/** Creates a new tree map using the elements of two parallel arrays and the given comparator.
+	 *
+	 * @param k the array of keys of the new tree map.
+	 * @param v the array of corresponding values in the new tree map.
+	 * @param c a (possibly type-specific) comparator.
+	 * @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths.
+	 */
+	 
+	public RB_TREE_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE v[], final Comparator<? super KEY_GENERIC_CLASS> c ) {
+		this( c );
+		if ( k.length != v.length ) throw new IllegalArgumentException( "The key array and the value array have different lengths (" + k.length + " and " + v.length + ")" );
+		for( int i = 0; i < k.length; i++ ) this.put( k[ i ], v[ i ] );
+	}
+
+	/** Creates a new tree map using the elements of two parallel arrays.
+	 *
+	 * @param k the array of keys of the new tree map.
+	 * @param v the array of corresponding values in the new tree map.
+	 * @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths.
+	 */
+	 
+	public RB_TREE_MAP( final KEY_GENERIC_TYPE[] k, final VALUE_GENERIC_TYPE v[] ) {
+		this( k, v, null );
+	}
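(Annotation, not part of the patch: a usage sketch of the parallel-array constructor and a few basic operations in the int/int instantiation generated from this driver; the class name and data are arbitrary.)

    import it.unimi.dsi.fastutil.ints.Int2IntRBTreeMap;

    public class RBTreeMapDemo {
        public static void main( String[] args ) {
            // Keys and values are matched positionally, so the two arrays must have equal length.
            Int2IntRBTreeMap m = new Int2IntRBTreeMap( new int[] { 5, 1, 3 }, new int[] { 50, 10, 30 } );
            System.out.println( m.firstIntKey() ); // 1 (keys are kept sorted)
            System.out.println( m.lastIntKey() );  // 5
            m.put( 2, 20 );                        // primitive put(), no boxing
            System.out.println( m.get( 2 ) );      // 20
        }
    }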
+
+	/*
+	 * The following methods implement some basic building blocks used by
+	 * all accessors.  They are (and should be maintained) identical to those used in RBTreeSet.drv.
+	 *
+	 * The put()/remove() code is derived from Ben Pfaff's GNU libavl
+	 * (http://www.msu.edu/~pfaffben/avl/). If you want to understand what's
+	 * going on, you should have a look at the literate code contained therein
+	 * first.  
+	 */
+
+
+	/** Compares two keys in the right way. 
+	 *
+	 * <P>This method uses the {@link #actualComparator} if it is non-<code>null</code>.
+	 * Otherwise, it resorts to primitive type comparisons or to {@link Comparable#compareTo(Object) compareTo()}.
+	 *
+	 * @param k1 the first key.
+	 * @param k2 the second key.
+	 * @return a number smaller than, equal to or greater than 0, as usual
+	 * (i.e., when k1 < k2, k1 = k2 or k1 > k2, respectively).
+	 */
+	 
+	@SuppressWarnings("unchecked")
+	final int compare( final KEY_GENERIC_TYPE k1, final KEY_GENERIC_TYPE k2 ) {
+		return actualComparator == null ? KEY_CMP( k1, k2 ) : actualComparator.compare( k1, k2 );
+	}
+
+
+
+	/** Returns the entry corresponding to the given key, if it is in the tree; <code>null</code>, otherwise.
+	 *
+	 * @param k the key to search for.
+	 * @return the corresponding entry, or <code>null</code> if no entry with the given key exists.
+	 */
+
+	final Entry KEY_VALUE_GENERIC findKey( final KEY_GENERIC_TYPE k ) {
+		Entry KEY_VALUE_GENERIC e = tree;
+		int cmp;
+		 
+		while ( e != null && ( cmp = compare( k, e.key ) ) != 0 ) e = cmp < 0 ? e.left() : e.right();
+
+		return e;
+	}
+
+	/** Locates a key.
+	 *
+	 * @param k a key.
+	 * @return the last entry on a search for the given key; this will be
+	 * the given key, if it is present; otherwise, it will be either the smallest greater key or the greatest smaller key.
+	 */
+
+	final Entry KEY_VALUE_GENERIC locateKey( final KEY_GENERIC_TYPE k ) {
+		Entry KEY_VALUE_GENERIC e = tree, last = tree;
+		int cmp = 0;
+		  
+		while ( e != null && ( cmp = compare( k, e.key ) ) != 0 ) {
+			last = e;
+			e = cmp < 0 ? e.left() : e.right();
+		}
+		  
+		return cmp == 0 ? e : last;
+	}
+
+	/** This vector remembers the path and the direction followed during the
+	 *  current insertion. Since the depth of a red-black tree grows logarithmically with its size, 64 slots suffice for about 2<sup>32</sup> entries. */
+	private transient boolean dirPath[];
+	private transient Entry KEY_VALUE_GENERIC nodePath[];
+
+	@SuppressWarnings("unchecked")
+	private void allocatePaths() {
+		dirPath = new boolean[ 64 ];
+		nodePath = new Entry[ 64 ];
+	}
+
+	/* After execution of this method, modified is true iff a new entry has
+	been inserted. */
+	 
+	public VALUE_GENERIC_TYPE put( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) {
+		modified = false;
+		int maxDepth = 0;
+
+		if ( tree == null ) { // The case of the empty tree is treated separately.
+			count++;
+			tree = lastEntry = firstEntry = new Entry KEY_VALUE_GENERIC( k, v );
+		}
+		else {
+			Entry KEY_VALUE_GENERIC p = tree, e;
+			int cmp, i = 0;
+
+			while( true ) {
+				if ( ( cmp = compare( k, p.key ) ) == 0 ) {
+					final VALUE_GENERIC_TYPE oldValue = p.value;
+					p.value = v;
+					// We clean up the node path, or we could have stale references later.
+					while( i-- != 0 ) nodePath[ i ] = null;
+					return oldValue;
+				}
+					 
+				nodePath[ i ] = p;
+					 
+				if ( dirPath[ i++ ] = cmp > 0 ) {
+					if ( p.succ() ) {
+						count++;
+						e = new Entry KEY_VALUE_GENERIC( k, v );
+								
+						if ( p.right == null ) lastEntry = e;
+								
+						e.left = p;
+						e.right = p.right;
+								
+						p.right( e );
+								
+						break;
+					}
+
+					p = p.right;
+				}
+				else {
+					if ( p.pred() ) {
+						count++;
+						e = new Entry KEY_VALUE_GENERIC( k, v );
+								
+						if ( p.left == null ) firstEntry = e;
+								
+						e.right = p;
+						e.left = p.left;
+								
+						p.left( e );
+
+						break;
+					}
+
+					p = p.left;
+				}
+			}
+
+			modified = true;
+			maxDepth = i--;
+
+			while( i > 0 && ! nodePath[ i ].black() ) {
+				if ( ! dirPath[ i - 1 ] ) {
+					Entry KEY_VALUE_GENERIC y = nodePath[ i - 1 ].right;
+
+					if ( ! nodePath[ i - 1 ].succ() && ! y.black() ) {
+						nodePath[ i ].black( true );
+						y.black( true );
+						nodePath[ i - 1 ].black( false );
+						i -= 2;
+					}
+					else {
+						Entry KEY_VALUE_GENERIC x;
+
+						if ( ! dirPath[ i ] ) y = nodePath[ i ];
+						else {
+							x = nodePath[ i ];
+							y = x.right;
+							x.right = y.left;
+							y.left = x;
+							nodePath[ i - 1 ].left = y;
+
+							if ( y.pred() ) {
+								y.pred( false );
+								x.succ( y );
+							}
+						}
+								
+						x = nodePath[ i - 1 ];
+						x.black( false );
+						y.black( true );
+								
+						x.left = y.right;
+						y.right = x;
+						if ( i < 2 ) tree = y;
+						else {
+							if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = y;
+							else nodePath[ i - 2 ].left = y;
+						}
+
+						if ( y.succ() ) {
+							y.succ( false );
+							x.pred( y );
+						}
+						break;
+					}
+				}
+				else {
+					Entry KEY_VALUE_GENERIC y = nodePath[ i - 1 ].left;
+						  
+					if ( ! nodePath[ i - 1 ].pred() && ! y.black() ) {
+						nodePath[ i ].black( true );
+						y.black( true );
+						nodePath[ i - 1 ].black( false );
+						i -= 2;
+					}
+					else {
+						Entry KEY_VALUE_GENERIC x;
+
+						if ( dirPath[ i ] ) y = nodePath[ i ];
+						else {
+							x = nodePath[ i ];
+							y = x.left;
+							x.left = y.right;
+							y.right = x;
+							nodePath[ i - 1 ].right = y;
+	 
+							if ( y.succ() ) {
+								y.succ( false );
+								x.pred( y );
+							}
+	 
+						}
+
+						x = nodePath[ i - 1 ];
+						x.black( false );
+						y.black( true );
+
+						x.right = y.left;
+						y.left = x;
+						if ( i < 2 ) tree = y;
+						else {
+							if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = y;
+							else nodePath[ i - 2 ].left = y;
+						}
+
+						if ( y.pred() ){
+							y.pred( false );
+							x.succ( y );
+						}
+
+						break;
+					}
+				}
+			}
+		}
+		tree.black( true );
+		// We clean up the node path, or we could have stale references later.
+		while( maxDepth-- != 0 ) nodePath[ maxDepth ] = null;
+		if ( ASSERTS ) {
+			checkNodePath();
+			checkTree( tree, 0, -1 );
+		}
+		return defRetValue;
+	}
+
+	 
+	/* After execution of this method, {@link #modified} is true iff an entry
+	has been deleted. */
+	 
+	@SuppressWarnings("unchecked")
+	public VALUE_GENERIC_TYPE REMOVE_VALUE( final KEY_TYPE k ) {
+		modified = false;
+		  
+		if ( tree == null ) return defRetValue;
+
+		Entry KEY_VALUE_GENERIC p = tree;
+		int cmp;
+		int i = 0;
+		final KEY_GENERIC_TYPE kk = KEY_GENERIC_CAST k;
+
+		while( true ) {
+			if ( ( cmp = compare( kk, p.key ) ) == 0 ) break;
+
+			dirPath[ i ] = cmp > 0;
+			nodePath[ i ] = p;
+
+			if ( dirPath[ i++ ] ) {
+				if ( ( p = p.right() ) == null ) {
+					// We clean up the node path, or we could have stale references later.
+					while( i-- != 0 ) nodePath[ i ] = null;
+					return defRetValue;
+				}
+			}
+			else {
+				if ( ( p = p.left() ) == null ) {
+					// We clean up the node path, or we could have stale references later.
+					while( i-- != 0 ) nodePath[ i ] = null;
+					return defRetValue;
+				}
+			}
+
+		}
+
+		if ( p.left == null ) firstEntry = p.next();
+		if ( p.right == null ) lastEntry = p.prev();
+
+		if ( p.succ() ) {
+			if ( p.pred() ) {
+				if ( i == 0 ) tree = p.left;
+				else {
+					if ( dirPath[ i - 1 ] ) nodePath[ i - 1 ].succ( p.right );
+					else nodePath[ i - 1 ].pred( p.left );
+				}
+			}
+			else {
+				p.prev().right = p.right;
+
+				if ( i == 0 ) tree = p.left;
+				else {
+					if ( dirPath[ i - 1 ] ) nodePath[ i - 1 ].right = p.left;
+					else nodePath[ i - 1 ].left = p.left;
+				}
+			}
+		}
+		else {
+			boolean color;
+			Entry KEY_VALUE_GENERIC r = p.right;
+
+			if ( r.pred() ) {
+				r.left = p.left;
+				r.pred( p.pred() );
+				if ( ! r.pred() ) r.prev().right = r;
+				if ( i == 0 ) tree = r;
+				else {
+					if ( dirPath[ i - 1 ] ) nodePath[ i - 1 ].right = r;
+					else nodePath[ i - 1 ].left = r;
+				}
+
+				color = r.black();
+				r.black( p.black() );
+				p.black( color );
+				dirPath[ i ] = true;
+				nodePath[ i++ ] = r;
+			}
+			else {
+				Entry KEY_VALUE_GENERIC s;
+				int j = i++;
+
+				while( true ) {
+					dirPath[ i ] = false;
+					nodePath[ i++ ] = r;
+					s = r.left;
+					if ( s.pred() ) break;
+					r = s;
+				}
+
+				dirPath[ j ] = true;
+				nodePath[ j ] = s;
+
+				if ( s.succ() ) r.pred( s );
+				else r.left = s.right;
+
+				s.left = p.left;
+
+				if ( ! p.pred() ) {
+					p.prev().right = s;
+					s.pred( false );
+				}
+
+				s.right( p.right );
+							
+				color = s.black();
+				s.black( p.black() );
+				p.black( color );
+	
+				if ( j == 0 ) tree = s;
+				else {
+					if ( dirPath[ j - 1 ] ) nodePath[ j - 1 ].right = s;
+					else nodePath[ j - 1 ].left = s;
+				}
+			}
+		}
+
+		int maxDepth = i;
+
+		if ( p.black() ) {
+			for( ; i > 0; i-- ) {
+				if ( dirPath[ i - 1 ] && ! nodePath[ i - 1 ].succ() ||
+					 ! dirPath[ i - 1 ] && ! nodePath[ i - 1 ].pred() ) {
+					Entry KEY_VALUE_GENERIC x = dirPath[ i - 1 ] ? nodePath[ i - 1 ].right : nodePath[ i - 1 ].left;
+
+					if ( ! x.black() ) {
+						x.black( true );
+						break;
+					}
+				}
+
+				if ( ! dirPath[ i - 1 ] ) {
+					Entry KEY_VALUE_GENERIC w = nodePath[ i - 1 ].right;
+
+					if ( ! w.black() ) {
+						w.black( true );
+						nodePath[ i - 1 ].black( false );
+
+						nodePath[ i - 1 ].right = w.left;
+						w.left = nodePath[ i - 1 ];
+
+						if ( i < 2 ) tree = w;
+						else {
+							if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = w;
+							else nodePath[ i - 2 ].left = w;
+						}
+
+						nodePath[ i ] = nodePath[ i - 1 ];
+						dirPath[ i ] = false;
+						nodePath[ i - 1 ] = w;
+						if ( maxDepth == i++ ) maxDepth++;
+
+						w = nodePath[ i - 1 ].right;
+					}
+
+					if ( ( w.pred() || w.left.black() ) &&
+						 ( w.succ() || w.right.black() ) ) {
+						w.black( false );
+					}
+					else {
+						if ( w.succ() || w.right.black() ) {
+							Entry KEY_VALUE_GENERIC y = w.left;
+									 
+							y.black ( true );
+							w.black( false );
+							w.left = y.right;
+							y.right = w;
+							w = nodePath[ i - 1 ].right = y;
+
+							if ( w.succ() ) {
+								w.succ( false );
+								w.right.pred( w );
+							}
+						}
+
+						w.black( nodePath[ i - 1 ].black() );
+						nodePath[ i - 1 ].black( true );
+						w.right.black( true );
+
+						nodePath[ i - 1 ].right = w.left;
+						w.left = nodePath[ i - 1 ];
+
+						if ( i < 2 ) tree = w;
+						else {
+							if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = w;
+							else nodePath[ i - 2 ].left = w;
+						}
+
+						if ( w.pred() ) {
+							w.pred( false );
+							nodePath[ i - 1 ].succ( w );
+						}
+						break;
+					}
+				}
+				else {
+					Entry KEY_VALUE_GENERIC w = nodePath[ i - 1 ].left;
+
+					if ( ! w.black() ) {
+						w.black ( true );
+						nodePath[ i - 1 ].black( false );
+
+						nodePath[ i - 1 ].left = w.right;
+						w.right = nodePath[ i - 1 ];
+
+						if ( i < 2 ) tree = w;
+						else {
+							if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = w;
+							else nodePath[ i - 2 ].left = w;
+						}
+
+						nodePath[ i ] = nodePath[ i - 1 ];
+						dirPath[ i ] = true;
+						nodePath[ i - 1 ] = w;
+						if ( maxDepth == i++ ) maxDepth++;
+
+						w = nodePath[ i - 1 ].left;
+					}
+						  
+					if ( ( w.pred() || w.left.black() ) &&
+						 ( w.succ() || w.right.black() ) ) {
+						w.black( false );
+					}
+					else {
+						if ( w.pred() || w.left.black() ) {
+							Entry KEY_VALUE_GENERIC y = w.right;
+
+							y.black( true );
+							w.black ( false );
+							w.right = y.left;
+							y.left = w;
+							w = nodePath[ i - 1 ].left = y;
+
+							if ( w.pred() ) {
+								w.pred( false );
+								w.left.succ( w );
+							}
+						}
+
+						w.black( nodePath[ i - 1 ].black() );
+						nodePath[ i - 1 ].black( true );
+						w.left.black( true );
+
+						nodePath[ i - 1 ].left = w.right;
+						w.right = nodePath[ i - 1 ];
+									  
+						if ( i < 2 ) tree = w;
+						else {
+							if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = w;
+							else nodePath[ i - 2 ].left = w;
+						}
+
+						if ( w.succ() ) {
+							w.succ( false );
+							nodePath[ i - 1 ].pred( w );
+						}
+						break;
+					}
+				}
+			}
+
+			if ( tree != null ) tree.black( true );
+		}
+
+		modified = true;
+		count--;
+		// We clean up the node path, or we could have stale references later.
+		while( maxDepth-- != 0 ) nodePath[ maxDepth ] = null;
+		if ( ASSERTS ) {
+			checkNodePath();
+			checkTree( tree, 0, -1 );
+		}
+		return p.value;
+	}
+
+
+
+#if ! #keyclass(Object) || #values(primitive)
+	public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS ok, final VALUE_GENERIC_CLASS ov ) {
+		final VALUE_GENERIC_TYPE oldValue = put( KEY_CLASS2TYPE(ok), VALUE_CLASS2TYPE(ov) );
+		return modified ? OBJECT_DEFAULT_RETURN_VALUE : VALUE2OBJ( oldValue );
+	}
+#endif
+
+#if ! #keyclass(Object) || #values(primitive)
+	public VALUE_GENERIC_CLASS remove( final Object ok ) {
+		final VALUE_GENERIC_TYPE oldValue = REMOVE_VALUE( KEY_OBJ2TYPE( ok ) );
+		return modified ? VALUE2OBJ( oldValue ) : OBJECT_DEFAULT_RETURN_VALUE;
+	}
+#endif
+
+
+	public boolean containsValue( final VALUE_TYPE v ) {
+		final ValueIterator i = new ValueIterator();
+		VALUE_TYPE ev;
+		  
+		int j = count;
+		while( j-- != 0 ) {
+			ev = i.NEXT_VALUE();
+			if ( VALUE_EQUALS( ev, v ) ) return true;
+		}
+		  
+		return false;
+	}
+
+
+	public void clear() {
+		count = 0;
+		tree = null;
+		entries = null;
+		values = null;
+		keys = null;
+		firstEntry = lastEntry = null;
+	}
+
+	 
+	/** This class represents an entry in a tree map.
+	 *
+	 * <P>We use a single piece of "metadata", i.e., {@link Entry#info}, to store
+	 * information about color, predecessor status and successor status.
+	 *
+	 * <P>Note that since the class is recursive, it can be
+	 * considered equivalently a tree.
+	 */
+
+	private static final class Entry KEY_VALUE_GENERIC implements Cloneable, MAP.Entry KEY_VALUE_GENERIC {
+		/** If the bit in this mask is true, the node is black. */
+		private final static int BLACK_MASK = 1;
+		/** If the bit in this mask is true, {@link #right} points to a successor. */
+		private final static int SUCC_MASK = 1 << 31;
+		/** If the bit in this mask is true, {@link #left} points to a predecessor. */
+		private final static int PRED_MASK = 1 << 30;
+		/** The key of this entry. */
+		KEY_GENERIC_TYPE key;
+		/** The value of this entry. */
+		VALUE_GENERIC_TYPE value;
+		/** The pointers to the left and right subtrees. */
+		Entry KEY_VALUE_GENERIC left, right;
+		/** This integer holds different information in different bits (see {@link #SUCC_MASK} and {@link #PRED_MASK}). */
+		int info;
+
+		Entry() {}
+
+		/** Creates a new entry with the given key and value.
+		 *
+		 * @param k a key.
+		 * @param v a value.
+		 */
+		Entry( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) {
+			this.key = k;
+			this.value = v;
+			info = SUCC_MASK | PRED_MASK;
+		}
+		  
+		/** Returns the left subtree. 
+		 *
+		 * @return the left subtree (<code>null</code> if the left
+		 * subtree is empty).
+		 */
+		Entry KEY_VALUE_GENERIC left() {
+			return ( info & PRED_MASK ) != 0 ? null : left;
+		}
+		  
+		/** Returns the right subtree. 
+		 *
+		 * @return the right subtree (<code>null</code> if the right
+		 * subtree is empty).
+		 */
+		Entry KEY_VALUE_GENERIC right() {
+			return ( info & SUCC_MASK ) != 0 ? null : right;
+		}
+		  
+		/** Checks whether the left pointer is really a predecessor.
+		 * @return true if the left pointer is a predecessor.
+		 */
+		boolean pred() {
+			return ( info & PRED_MASK ) != 0;
+		}
+		  
+		/** Checks whether the right pointer is really a successor.
+		 * @return true if the right pointer is a successor.
+		 */
+		boolean succ() {
+			return ( info & SUCC_MASK ) != 0;
+		}
+		  
+		/** Sets whether the left pointer is really a predecessor.
+		 * @param pred if true then the left pointer will be considered a predecessor.
+		 */
+		void pred( final boolean pred ) {
+			if ( pred ) info |= PRED_MASK;
+			else info &= ~PRED_MASK;
+		}
+		  
+		/** Sets whether the right pointer is really a successor.
+		 * @param succ if true then the right pointer will be considered a successor.
+		 */
+		void succ( final boolean succ ) {
+			if ( succ ) info |= SUCC_MASK;
+			else info &= ~SUCC_MASK;
+		}
+		  
+		/** Sets the left pointer to a predecessor.
+		 * @param pred the predecessor.
+		 */
+		void pred( final Entry KEY_VALUE_GENERIC pred ) {
+			info |= PRED_MASK;
+			left = pred;
+		}
+		  
+		/** Sets the right pointer to a successor.
+		 * @param succ the successor.
+		 */
+		void succ( final Entry KEY_VALUE_GENERIC succ ) {
+			info |= SUCC_MASK;
+			right = succ;
+		}
+		  
+		/** Sets the left pointer to the given subtree.
+		 * @param left the new left subtree.
+		 */
+		void left( final Entry KEY_VALUE_GENERIC left ) {
+			info &= ~PRED_MASK;
+			this.left = left;
+		}
+		  
+		/** Sets the right pointer to the given subtree.
+		 * @param right the new right subtree.
+		 */
+		void right( final Entry KEY_VALUE_GENERIC right ) {
+			info &= ~SUCC_MASK;
+			this.right = right;
+		}
+		  
+		  
+		/** Returns whether this node is black.
+		 * @return true iff this node is black.
+		 */
+		boolean black() {
+			return ( info & BLACK_MASK ) != 0;
+		}
+
+		/** Sets whether this node is black.
+		 * @param black if true, then this node becomes black; otherwise, it becomes red.
+		 */
+		void black( final boolean black ) {
+			if ( black ) info |= BLACK_MASK;
+			else info &= ~BLACK_MASK;
+		}
+
+		/** Computes the next entry in the set order.
+		 *
+		 * @return the next entry (<code>null</code> if this is the last entry).
+		 */
+
+		Entry KEY_VALUE_GENERIC next() {
+			Entry KEY_VALUE_GENERIC next = this.right;
+			if ( ( info & SUCC_MASK ) == 0 ) while ( ( next.info & PRED_MASK ) == 0 ) next = next.left;
+			return next;
+		}
+
+		/** Computes the previous entry in the set order.
+		 *
+		 * @return the previous entry (<code>null</code> if this is the first entry).
+		 */
+
+		Entry KEY_VALUE_GENERIC prev() {
+			Entry KEY_VALUE_GENERIC prev = this.left;
+			if ( ( info & PRED_MASK ) == 0 ) while ( ( prev.info & SUCC_MASK ) == 0 ) prev = prev.right;
+			return prev;
+		}
+
+		public KEY_GENERIC_CLASS getKey() {
+			return KEY2OBJ(key);
+		}
+		  
+#if ! #keyclass(Object)
+		public KEY_GENERIC_TYPE ENTRY_GET_KEY() {
+			return key;
+		}
+#endif
+		  
+		public VALUE_GENERIC_CLASS getValue() {
+			return VALUE2OBJ(value);
+		}
+		  
+#if #values(primitive)
+		public VALUE_TYPE ENTRY_GET_VALUE() {
+			return value;
+		}
+#endif
+		  
+		public VALUE_GENERIC_TYPE setValue(final VALUE_GENERIC_TYPE value) {
+			final VALUE_GENERIC_TYPE oldValue = this.value;
+			this.value = value;
+			return oldValue;
+		}
+		  
+#if #values(primitive)
+		  
+		public VALUE_GENERIC_CLASS setValue(final VALUE_GENERIC_CLASS value) {
+			return VALUE2OBJ(setValue(VALUE_CLASS2TYPE(value)));
+		}
+		  
+#endif
+		  
+		@SuppressWarnings("unchecked")
+		public Entry KEY_VALUE_GENERIC clone() {
+			Entry KEY_VALUE_GENERIC c;
+			try {
+				c = (Entry KEY_VALUE_GENERIC)super.clone();
+			}
+			catch(CloneNotSupportedException cantHappen) {
+				throw new InternalError();
+			}
+
+			c.key = key;
+			c.value = value;
+			c.info = info;
+
+			return c;
+		}
+		  
+		@SuppressWarnings("unchecked")
+		public boolean equals( final Object o ) {
+			if (!(o instanceof Map.Entry)) return false;
+			Map.Entry <KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> e = (Map.Entry <KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>)o;
+				
+			return KEY_EQUALS( key, KEY_CLASS2TYPE( e.getKey() ) ) && VALUE_EQUALS( value, VALUE_CLASS2TYPE( e.getValue() ) );
+		}
+		  
+		public int hashCode() {
+			return KEY2JAVAHASH(key) ^ VALUE2JAVAHASH(value);
+		}
+		  
+		  
+		public String toString() {
+			return key + "=>" + value;
+		}
+		  
+		/*
+		  public void prettyPrint() {
+		  prettyPrint(0);
+		  }
+
+		  public void prettyPrint(int level) {
+		  if ( pred() ) {
+		  for (int i = 0; i < level; i++)
+		  System.err.print("  ");
+		  System.err.println("pred: " + left );
+		  }
+		  else if (left != null)
+		  left.prettyPrint(level +1 );
+		  for (int i = 0; i < level; i++)
+		  System.err.print("  ");
+		  System.err.println(key + "=" + value + " (" + balance() + ")");
+		  if ( succ() ) {
+		  for (int i = 0; i < level; i++)
+		  System.err.print("  ");
+		  System.err.println("succ: " + right );
+		  }
+		  else if (right != null)
+		  right.prettyPrint(level + 1);
+		  }*/
+	}
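(Annotation, not part of the patch: a small self-contained sketch of the bit-packing idiom used by Entry.info above: a single int carries the color bit and the two thread flags, and each accessor masks or sets only its own bit. The constants mirror the ones above; everything else is illustrative.)

    public class BitFlagsDemo {
        private static final int BLACK_MASK = 1;
        private static final int SUCC_MASK = 1 << 31;
        private static final int PRED_MASK = 1 << 30;

        private int info;

        boolean black() { return ( info & BLACK_MASK ) != 0; }
        void black( boolean black ) { if ( black ) info |= BLACK_MASK; else info &= ~BLACK_MASK; }

        public static void main( String[] args ) {
            BitFlagsDemo e = new BitFlagsDemo();
            e.info = SUCC_MASK | PRED_MASK; // a fresh leaf: both pointers are threads, node is red
            System.out.println( e.black() );                   // false
            e.black( true );
            System.out.println( e.black() );                   // true
            System.out.println( ( e.info & SUCC_MASK ) != 0 ); // true: the other flags are untouched
        }
    }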
+	 
+	/*
+	  public void prettyPrint() {
+	  System.err.println("size: " + count);
+	  if (tree != null) tree.prettyPrint();
+	  }*/
+
+	@SuppressWarnings("unchecked")
+	public boolean containsKey( final KEY_TYPE k ) {
+		return findKey( KEY_GENERIC_CAST k ) != null;
+	}
+	 
+	public int size() {
+		return count;
+	}
+	 
+	public boolean isEmpty() {
+		return count == 0;
+	}
+	 
+	 
+	@SuppressWarnings("unchecked")
+	public VALUE_GENERIC_TYPE GET_VALUE( final KEY_TYPE k ) {
+		final Entry KEY_VALUE_GENERIC e = findKey( KEY_GENERIC_CAST k );
+		return e == null ? defRetValue : e.value;
+	}
+
+#if #keyclass(Object) && #values(primitive)
+
+	@SuppressWarnings("unchecked")
+	public VALUE_GENERIC_CLASS get( final Object ok ) {
+		final Entry KEY_VALUE_GENERIC e = findKey( KEY_GENERIC_CAST ok );
+		return e == null ? OBJECT_DEFAULT_RETURN_VALUE : e.getValue();
+	}
+
+#endif
+
+	public KEY_GENERIC_TYPE FIRST_KEY() {
+		if ( tree == null ) throw new NoSuchElementException();
+		return firstEntry.key;
+	}
+
+	public KEY_GENERIC_TYPE LAST_KEY() {
+		if ( tree == null ) throw new NoSuchElementException();
+		return lastEntry.key;
+	}
+
+
+	/** An abstract iterator on the whole range.
+	 *
+	 * <P>This class can iterate in both directions on a threaded tree.
+	 */
+
+	private class TreeIterator {
+		/** The entry that will be returned by the next call to {@link java.util.ListIterator#previous()} (or <code>null</code> if no previous entry exists). */
+		Entry KEY_VALUE_GENERIC prev;
+		/** The entry that will be returned by the next call to {@link java.util.ListIterator#next()} (or <code>null</code> if no next entry exists). */
+		Entry KEY_VALUE_GENERIC next;
+		/** The last entry that was returned (or <code>null</code> if we did not iterate or used {@link #remove()}). */
+		Entry KEY_VALUE_GENERIC curr;
+		/** The current index (in the sense of a {@link java.util.ListIterator}). Note that this value is not meaningful when this {@link TreeIterator} has been created using the nonempty constructor.*/
+		int index = 0;
+		  
+		TreeIterator() {
+			next = firstEntry;
+		}
+
+		TreeIterator( final KEY_GENERIC_TYPE k ) {
+			if ( ( next = locateKey( k ) ) != null ) {
+				if ( compare( next.key, k ) <= 0 ) {
+					prev = next;
+					next = next.next();
+				}
+				else prev = next.prev();
+			}
+		}
+
+		public boolean hasNext() { return next != null; }
+		public boolean hasPrevious() { return prev != null; }
+
+		void updateNext() {
+			next = next.next();
+		}
+
+		Entry KEY_VALUE_GENERIC nextEntry() {
+			if ( ! hasNext() ) throw new NoSuchElementException();
+			curr = prev = next;
+			index++;
+			updateNext();
+			return curr;
+		}
+
+		void updatePrevious() {
+			prev = prev.prev();
+		}
+
+		Entry KEY_VALUE_GENERIC previousEntry() {
+			if ( ! hasPrevious() ) throw new NoSuchElementException();
+			curr = next = prev;
+			index--;
+			updatePrevious();
+			return curr;
+		}
+
+		public int nextIndex() {
+			return index;
+		}
+
+		public int previousIndex() {
+			return index - 1;
+		}
+
+		public void remove() {
+			if ( curr == null ) throw new IllegalStateException();
+			/* If the last operation was a next(), we are removing an entry that precedes
+			   the current index, and thus we must decrement it. */
+			if ( curr == prev ) index--;
+			next = prev = curr;
+			updatePrevious();
+			updateNext();
+			RB_TREE_MAP.this.REMOVE_VALUE( curr.key );
+			curr = null;
+		}
+
+		public int skip( final int n ) { 
+			int i = n;
+			while( i-- != 0 && hasNext() ) nextEntry(); 
+			return n - i - 1;
+		}
+
+		public int back( final int n ) { 
+			int i = n;
+			while( i-- != 0 && hasPrevious() ) previousEntry(); 
+			return n - i - 1;
+		}
+	}
+
+
+	/** An iterator on the whole range.
+	 *
+	 * <P>This class can iterate in both directions on a threaded tree.
+	 */
+
+	private class EntryIterator extends TreeIterator implements ObjectListIterator<MAP.Entry KEY_VALUE_GENERIC> {
+		EntryIterator() {}
+
+		EntryIterator( final KEY_GENERIC_TYPE k ) {
+			super( k );
+		}
+
+		public MAP.Entry KEY_VALUE_GENERIC next() { return nextEntry(); }
+		public MAP.Entry KEY_VALUE_GENERIC previous() { return previousEntry(); }
+
+		public void set( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); }
+		public void add( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); }
+	}
+
+
+	public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> ENTRYSET() {
+		if ( entries == null ) entries = new AbstractObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC>() {
+				final Comparator<? super MAP.Entry KEY_VALUE_GENERIC> comparator = new Comparator<MAP.Entry KEY_VALUE_GENERIC> () {
+					public int compare( final MAP.Entry KEY_VALUE_GENERIC x, MAP.Entry KEY_VALUE_GENERIC y ) {
+						return RB_TREE_MAP.this.storedComparator.compare( x.getKey(), y.getKey() );
+					}
+				};
+
+				public Comparator<? super MAP.Entry KEY_VALUE_GENERIC> comparator() {
+					return comparator;
+				}
+
+				public ObjectBidirectionalIterator<MAP.Entry KEY_VALUE_GENERIC> iterator() {
+					return new EntryIterator();
+				}
+
+				public ObjectBidirectionalIterator<MAP.Entry KEY_VALUE_GENERIC> iterator( final MAP.Entry KEY_VALUE_GENERIC from ) {
+					return new EntryIterator( KEY_CLASS2TYPE( from.getKey() ) );
+				}
+
+				@SuppressWarnings("unchecked")
+				public boolean contains( final Object o ) {
+					if (!(o instanceof Map.Entry)) return false;
+					final Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> e = (Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>)o;
+					final Entry KEY_VALUE_GENERIC f = findKey( KEY_CLASS2TYPE( e.getKey() ) );
+					return e.equals( f );
+				}					 
+
+				@SuppressWarnings("unchecked")
+				public boolean remove( final Object o ) {
+					if (!(o instanceof Map.Entry)) return false;
+					final Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> e = (Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>)o;
+					final Entry KEY_VALUE_GENERIC f = findKey( KEY_CLASS2TYPE( e.getKey() ) );
+					if ( f != null ) RB_TREE_MAP.this.REMOVE_VALUE( f.key );
+					return f != null;
+				}
+
+				public int size() { return count; }
+				public void clear() { RB_TREE_MAP.this.clear(); }
+					 
+				public MAP.Entry KEY_VALUE_GENERIC first() { return firstEntry; }
+				public MAP.Entry KEY_VALUE_GENERIC last() { return lastEntry; }
+				public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> subSet( MAP.Entry KEY_VALUE_GENERIC from, MAP.Entry KEY_VALUE_GENERIC to  ) { return subMap( from.getKey(), to.getKey() ).ENTRYSET(); }
+				public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> headSet( MAP.Entry KEY_VALUE_GENERIC to  ) { return headMap( to.getKey() ).ENTRYSET(); }
+				public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> tailSet( MAP.Entry KEY_VALUE_GENERIC from  ) { return tailMap( from.getKey() ).ENTRYSET(); }
+			};
+
+		return entries;
+	}
+
+	/** An iterator on the whole range of keys.
+	 *
+	 * <P>This class can iterate in both directions on the keys of a threaded tree. We 
+	 * simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods (and possibly
+	 * their type-specific counterparts) so that they return keys instead of entries.
+	 */
+	private final class KeyIterator extends TreeIterator implements KEY_LIST_ITERATOR KEY_GENERIC {
+		public KeyIterator() {}
+		public KeyIterator( final KEY_GENERIC_TYPE k ) { super( k ); }
+		public KEY_GENERIC_TYPE NEXT_KEY() { return nextEntry().key; }
+		public KEY_GENERIC_TYPE PREV_KEY() { return previousEntry().key; }
+
+		public void set( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+		public void add( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+
+#if !#keyclass(Object)
+		public KEY_GENERIC_CLASS next() { return KEY2OBJ( nextEntry().key ); }
+		public KEY_GENERIC_CLASS previous() { return KEY2OBJ( previousEntry().key ); }
+		public void set( KEY_CLASS ok ) { throw new UnsupportedOperationException(); }
+		public void add( KEY_CLASS ok ) { throw new UnsupportedOperationException(); }
+#endif							
+	};
+
+
+	/** A keyset implementation using a more direct implementation for iterators. */
+	private class KeySet extends ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC.KeySet {
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return new KeyIterator();	}
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return new KeyIterator( from ); }
+	}
+
+	/** Returns a type-specific sorted set view of the keys contained in this map.
+	 *
+	 * <P>In addition to the semantics of {@link java.util.Map#keySet()}, you can
+	 * safely cast the set returned by this call to a type-specific sorted
+	 * set interface.
+	 *
+	 * @return a type-specific sorted set view of the keys contained in this map.
+	 */
+	public SORTED_SET KEY_GENERIC keySet() {
+		if ( keys == null ) keys = new KeySet();
+		return keys;
+	}
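+
+	/* A minimal usage sketch, assuming the generated Int2IntRBTreeMap specialization and the
+	 * type-specific interfaces of it.unimi.dsi.fastutil.ints (IntSortedSet, IntBidirectionalIterator):
+	 *
+	 *   java.util.SortedMap<Integer, Integer> m = new Int2IntRBTreeMap();
+	 *   m.put( 1, 10 ); m.put( 2, 20 ); m.put( 3, 30 );
+	 *   IntSortedSet keys = (IntSortedSet)m.keySet(); // the cast is safe, as documented above
+	 *   IntBidirectionalIterator i = keys.iterator( 1 ); // positioned just after key 1
+	 *   int k = i.nextInt(); // 2: the first key greater than 1
+	 */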
+
+	/** An iterator on the whole range of values.
+	 *
+	 * <P>This class can iterate in both directions on the values of a threaded tree. We 
+	 * simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods (and possibly
+	 * their type-specific counterparts) so that they return values instead of entries.
+	 */
+	private final class ValueIterator extends TreeIterator implements VALUE_LIST_ITERATOR VALUE_GENERIC {
+		public VALUE_GENERIC_TYPE NEXT_VALUE() { return nextEntry().value; }
+		public VALUE_GENERIC_TYPE PREV_VALUE() { return previousEntry().value; }
+		public void set( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); }
+		public void add( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); }
+
+#if #values(primitive)
+		public VALUE_GENERIC_CLASS next() { return VALUE2OBJ( nextEntry().value ); }
+		public VALUE_GENERIC_CLASS previous() { return VALUE2OBJ( previousEntry().value ); }
+		public void set( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); }
+		public void add( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); }
+#endif									
+	};
+
+	/** Returns a type-specific collection view of the values contained in this map.
+	 *
+	 * <P>In addition to the semantics of {@link java.util.Map#values()}, you can
+	 * safely cast the collection returned by this call to a type-specific collection
+	 * interface.
+	 *
+	 * @return a type-specific collection view of the values contained in this map.
+	 */
+
+	public VALUE_COLLECTION VALUE_GENERIC values() {
+		if ( values == null ) values = new VALUE_ABSTRACT_COLLECTION VALUE_GENERIC() {
+				public VALUE_ITERATOR VALUE_GENERIC iterator() {
+					return new ValueIterator();
+				}
+
+				public boolean contains( final VALUE_TYPE k ) {
+					return containsValue( k );
+				} 
+
+				public int size() {
+					return count;
+				}
+					 
+				public void clear() {
+					RB_TREE_MAP.this.clear();
+				}
+					 
+			};
+
+		return values;
+	}
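+
+	/* A minimal usage sketch, assuming the generated Int2IntRBTreeMap specialization and the
+	 * IntCollection interface of it.unimi.dsi.fastutil.ints; the returned collection is a live
+	 * view, so clearing it empties the map (the clear() above delegates to the enclosing map):
+	 *
+	 *   Int2IntRBTreeMap m = new Int2IntRBTreeMap();
+	 *   m.put( 1, 10 ); m.put( 2, 20 );
+	 *   IntCollection v = m.values();
+	 *   boolean has = v.contains( 20 ); // true
+	 *   v.clear(); // m.isEmpty() is now true
+	 */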
+
+	public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() {
+		return actualComparator;
+	}
+
+	public SORTED_MAP KEY_VALUE_GENERIC headMap( KEY_GENERIC_TYPE to ) {
+		return new Submap( KEY_NULL, true, to, false );
+	}
+
+	public SORTED_MAP KEY_VALUE_GENERIC tailMap( KEY_GENERIC_TYPE from ) {
+		return new Submap( from, false, KEY_NULL, true );
+	}
+
+	public SORTED_MAP KEY_VALUE_GENERIC subMap( KEY_GENERIC_TYPE from,  KEY_GENERIC_TYPE to ) {
+		return new Submap( from, false, to, false );
+	}
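+
+	/* A minimal usage sketch, assuming the generated Int2IntRBTreeMap specialization: the
+	 * head/tail/sub maps returned above are live views backed by this map, so changes made
+	 * through either side are visible through the other.
+	 *
+	 *   Int2IntRBTreeMap m = new Int2IntRBTreeMap();
+	 *   for( int i = 0; i < 10; i++ ) m.put( i, i * i );
+	 *   Int2IntSortedMap head = m.headMap( 5 ); // view of the keys 0..4
+	 *   head.remove( 3 );                       // also removed from m
+	 *   m.put( 2, -1 );                         // visible through head
+	 *   int first = head.firstIntKey();         // 0
+	 */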
+
+	/** A submap with given range.
+	 *
+	 * <P>This class represents a submap. One has to specify the left/right
+	 * limits (which can be set to -∞ or ∞). Since the submap is a
+	 * view on the map, at a given moment it could happen that the limits of
+	 * the range are no longer in the main map. Thus, things such as
+	 * {@link java.util.SortedMap#firstKey()} or {@link java.util.Collection#size()} must always be computed
+	 * on the fly.  
+	 */
+	private final class Submap extends ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC implements java.io.Serializable {
+    	private static final long serialVersionUID = -7046029254386353129L;
+
+		/** The start of the submap range, unless {@link #bottom} is true. */
+		KEY_GENERIC_TYPE from;
+		/** The end of the submap range, unless {@link #top} is true. */
+		KEY_GENERIC_TYPE to;
+		/** If true, the submap range starts from -∞. */
+		boolean bottom;
+		/** If true, the submap range goes to ∞. */
+		boolean top;
+		/** Cached set of entries. */
+		@SuppressWarnings("hiding")
+		protected transient volatile ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> entries;
+		/** Cached set of keys. */
+		@SuppressWarnings("hiding")
+		protected transient volatile SORTED_SET KEY_GENERIC keys;
+		/** Cached collection of values. */
+		@SuppressWarnings("hiding")
+		protected transient volatile VALUE_COLLECTION VALUE_GENERIC values;
+		  
+		/** Creates a new submap with given key range.
+		 *
+		 * @param from the start of the submap range.
+		 * @param bottom if true, the first parameter is ignored and the range starts from -∞.
+		 * @param to the end of the submap range.
+		 * @param top if true, the third parameter is ignored and the range goes to ∞.
+		 */
+		public Submap( final KEY_GENERIC_TYPE from, final boolean bottom, final KEY_GENERIC_TYPE to, final boolean top ) {
+			if ( ! bottom && ! top && RB_TREE_MAP.this.compare( from, to ) > 0 ) throw new IllegalArgumentException( "Start key (" + from  + ") is larger than end key (" + to + ")" );
+
+			this.from = from;
+			this.bottom = bottom;
+			this.to = to;
+			this.top = top;
+			this.defRetValue = RB_TREE_MAP.this.defRetValue;
+		}
+
+		public void clear() {
+			final SubmapIterator i = new SubmapIterator();
+			while( i.hasNext() ) {
+				i.nextEntry();
+				i.remove();
+			}
+		}
+
+		/** Checks whether a key is in the submap range.
+		 * @param k a key.
+		 * @return true if the key is in the submap range.
+		 */
+		final boolean in( final KEY_GENERIC_TYPE k ) {
+			return ( bottom || RB_TREE_MAP.this.compare( k, from ) >= 0 ) &&
+				( top || RB_TREE_MAP.this.compare( k, to ) < 0 );
+		}
+
+		public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> ENTRYSET() {
+			if ( entries == null ) entries = new AbstractObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC>() {
+					public ObjectBidirectionalIterator<MAP.Entry KEY_VALUE_GENERIC> iterator() {
+						return new SubmapEntryIterator();
+					}
+
+					public ObjectBidirectionalIterator<MAP.Entry KEY_VALUE_GENERIC> iterator( final MAP.Entry KEY_VALUE_GENERIC from ) {
+						return new SubmapEntryIterator( KEY_CLASS2TYPE( from.getKey() ) );
+					}
+
+					public Comparator<? super MAP.Entry KEY_VALUE_GENERIC> comparator() { return RB_TREE_MAP.this.ENTRYSET().comparator(); }
+
+					@SuppressWarnings("unchecked")
+					public boolean contains( final Object o ) {
+						if (!(o instanceof Map.Entry)) return false;
+						final Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> e = (Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>)o;
+						final RB_TREE_MAP.Entry KEY_VALUE_GENERIC f = findKey( KEY_CLASS2TYPE( e.getKey() ) );
+						return f != null && in( f.key ) && e.equals( f );
+					}					 
+
+					@SuppressWarnings("unchecked")
+					public boolean remove( final Object o ) {
+						if (!(o instanceof Map.Entry)) return false;
+						final Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> e = (Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>)o;
+						final RB_TREE_MAP.Entry KEY_VALUE_GENERIC f = findKey( KEY_CLASS2TYPE( e.getKey() ) );
+						if ( f != null && in( f.key ) ) Submap.this.REMOVE_VALUE( f.key );
+						return f != null;
+					}
+
+					public int size() {
+						int c = 0;
+						for( Iterator<?> i = iterator(); i.hasNext(); i.next() ) c++;
+						return c;
+					}
+
+					public boolean isEmpty() { return ! new SubmapIterator().hasNext(); }
+					public void clear() { Submap.this.clear(); }
+					 
+					public MAP.Entry KEY_VALUE_GENERIC first() { return firstEntry(); }
+					public MAP.Entry KEY_VALUE_GENERIC last() { return lastEntry(); }
+					public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> subSet( MAP.Entry KEY_VALUE_GENERIC from, MAP.Entry KEY_VALUE_GENERIC to  ) { return subMap( from.getKey(), to.getKey() ).ENTRYSET(); }
+					public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> headSet( MAP.Entry KEY_VALUE_GENERIC to  ) { return headMap( to.getKey() ).ENTRYSET(); }
+					public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> tailSet( MAP.Entry KEY_VALUE_GENERIC from  ) { return tailMap( from.getKey() ).ENTRYSET(); }
+				};
+
+			return entries;
+		}
+
+		private class KeySet extends ABSTRACT_SORTED_MAP KEY_VALUE_GENERIC.KeySet {
+			public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return new SubmapKeyIterator();	}
+			public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return new SubmapKeyIterator( from ); }
+		}
+
+		public SORTED_SET KEY_GENERIC keySet() {
+			if ( keys == null ) keys = new KeySet();
+			return keys;
+		}
+		  
+		public VALUE_COLLECTION VALUE_GENERIC values() {
+			if ( values == null ) values = new VALUE_ABSTRACT_COLLECTION VALUE_GENERIC() {
+					public VALUE_ITERATOR VALUE_GENERIC iterator() {
+						return new SubmapValueIterator();
+					}
+
+					public boolean contains( final VALUE_TYPE k ) {
+						return containsValue( k );
+					} 
+
+					public int size() {
+						return Submap.this.size();
+					}
+						  
+					public void clear() {
+						Submap.this.clear();
+					}
+						  
+				};
+				
+			return values;
+		}
+		  
+		@SuppressWarnings("unchecked")
+		public boolean containsKey( final KEY_TYPE k ) {
+			return in( KEY_GENERIC_CAST k ) && RB_TREE_MAP.this.containsKey( k );
+		}
+
+		public boolean containsValue( final VALUE_TYPE v ) {
+			final SubmapIterator i = new SubmapIterator();
+			VALUE_TYPE ev;
+				
+			while( i.hasNext() ) {
+				ev = i.nextEntry().value;
+				if ( VALUE_EQUALS( ev, v ) ) return true;
+			}
+				
+			return false;
+		}
+		  
+
+		@SuppressWarnings("unchecked")
+		public VALUE_GENERIC_TYPE GET_VALUE(final KEY_TYPE k) {
+			final RB_TREE_MAP.Entry KEY_VALUE_GENERIC e;
+			final KEY_GENERIC_TYPE kk = KEY_GENERIC_CAST k;
+			return in( kk ) && ( e = findKey( kk ) ) != null ? e.value : this.defRetValue;
+		}
+		  
+		  
+#if #keyclass(Object) && #values(primitive)
+
+		@SuppressWarnings("unchecked")
+		public VALUE_GENERIC_CLASS get( final Object ok ) {
+			final RB_TREE_MAP.Entry KEY_VALUE_GENERIC e;
+			final KEY_GENERIC_TYPE kk = KEY_GENERIC_CAST KEY_OBJ2TYPE( ok );
+			return in( kk ) && ( e = findKey( kk ) ) != null ? e.getValue() : OBJECT_DEFAULT_RETURN_VALUE;
+		}
+#endif
+		  
+		public VALUE_GENERIC_TYPE put(final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v) {
+			modified = false;
+			if ( ! in( k ) ) throw new IllegalArgumentException( "Key (" + k + ") out of range [" + ( bottom ? "-" : String.valueOf( from ) ) + ", " + ( top ? "-" : String.valueOf( to ) ) + ")" ); 
+			final VALUE_GENERIC_TYPE oldValue = RB_TREE_MAP.this.put( k, v );
+			return modified ? this.defRetValue : oldValue;
+		}
+
+		  
+#if ! #keyclass(Object) || #values(primitive)
+		public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS ok, final VALUE_GENERIC_CLASS ov ) {
+			final VALUE_GENERIC_TYPE oldValue = put( KEY_CLASS2TYPE(ok), VALUE_CLASS2TYPE(ov) );
+			return modified ? OBJECT_DEFAULT_RETURN_VALUE : VALUE2OBJ( oldValue );
+		}
+#endif
+
+		@SuppressWarnings("unchecked")
+		public VALUE_GENERIC_TYPE REMOVE_VALUE( final KEY_TYPE k ) {
+			modified = false;
+			if ( ! in( KEY_GENERIC_CAST k ) ) return this.defRetValue;
+			final VALUE_GENERIC_TYPE oldValue = RB_TREE_MAP.this.REMOVE_VALUE( k );
+			return modified ? oldValue : this.defRetValue;
+		}
+
+#if ! #keyclass(Object) || #values(primitive)
+		public VALUE_GENERIC_CLASS remove( final Object ok ) {
+			final VALUE_GENERIC_TYPE oldValue = REMOVE_VALUE( KEY_OBJ2TYPE( ok ) );
+			return modified ? VALUE2OBJ( oldValue ) : OBJECT_DEFAULT_RETURN_VALUE;
+		}
+#endif
+
+		public int size() {
+			final SubmapIterator i = new SubmapIterator();
+			int n = 0;
+				
+			while( i.hasNext() ) {
+				n++;
+				i.nextEntry();
+			}
+				
+			return n;
+		}
+
+
+		public boolean isEmpty() {
+			return ! new SubmapIterator().hasNext();
+		}
+		  
+		public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() {
+			return actualComparator;
+		}
+		  
+		public SORTED_MAP KEY_VALUE_GENERIC headMap( final KEY_GENERIC_TYPE to ) {
+			if ( top ) return new Submap( from, bottom, to, false );
+			return compare( to, this.to ) < 0 ? new Submap( from, bottom, to, false ) : this;
+		}
+		  
+		public SORTED_MAP KEY_VALUE_GENERIC tailMap( final KEY_GENERIC_TYPE from ) {
+			if ( bottom ) return new Submap( from, false, to, top );
+			return compare( from, this.from ) > 0 ? new Submap( from, false, to, top ) : this;
+		}
+		  
+		public SORTED_MAP KEY_VALUE_GENERIC subMap( KEY_GENERIC_TYPE from,  KEY_GENERIC_TYPE to ) {
+			if ( top && bottom ) return new Submap( from, false, to, false );
+			if ( ! top ) to = compare( to, this.to ) < 0 ? to : this.to;
+			if ( ! bottom ) from = compare( from, this.from ) > 0 ? from : this.from;
+			if ( ! top && ! bottom && from == this.from && to == this.to ) return this;
+			return new Submap( from, false, to, false );
+		}
+
+		/** Locates the first entry.
+		 *
+		 * @return the first entry of this submap, or <code>null</code> if the submap is empty.
+		 */
+		public RB_TREE_MAP.Entry KEY_VALUE_GENERIC firstEntry() {
+			if ( tree == null ) return null;
+			// If this submap goes to -infinity, we return the main map's first entry; otherwise, we locate the start of the range.
+			RB_TREE_MAP.Entry KEY_VALUE_GENERIC e;
+			if ( bottom ) e = firstEntry;
+			else {
+				e = locateKey( from );
+				// If we find either the start or something greater we're OK.
+				if ( compare( e.key, from ) < 0 ) e = e.next();
+			}
+			// Finally, if this submap doesn't go to infinity, we check that the resulting key isn't greater than the end.
+			if ( e == null || ! top && compare( e.key, to ) >= 0 ) return null;
+			return e;
+		}
+	 
+		/** Locates the last entry.
+		 *
+		 * @return the last entry of this submap, or <code>null</code> if the submap is empty.
+		 */
+		public RB_TREE_MAP.Entry KEY_VALUE_GENERIC lastEntry() {
+			if ( tree == null ) return null;
+			// If this submap goes to infinity, we return the main map's last entry; otherwise, we locate the end of the range.
+			RB_TREE_MAP.Entry KEY_VALUE_GENERIC e;
+			if ( top ) e = lastEntry;
+			else {
+				e = locateKey( to );
+				// If we find something smaller than the end we're OK.
+				if ( compare( e.key, to ) >= 0 ) e = e.prev();
+			}
+			// Finally, if this submap doesn't go to -infinity, we check that the resulting key isn't smaller than the start.
+			if ( e == null || ! bottom && compare( e.key, from ) < 0 ) return null;
+			return e;
+		}
+
+		public KEY_GENERIC_TYPE FIRST_KEY() {
+			RB_TREE_MAP.Entry KEY_VALUE_GENERIC e = firstEntry();
+			if ( e == null ) throw new NoSuchElementException();
+			return e.key;
+		}
+		public KEY_GENERIC_TYPE LAST_KEY() {
+			RB_TREE_MAP.Entry KEY_VALUE_GENERIC e = lastEntry();
+			if ( e == null ) throw new NoSuchElementException();
+			return e.key;
+		}
+	 
+#if !#keyclass(Object)
+		public KEY_GENERIC_CLASS firstKey() {
+			RB_TREE_MAP.Entry KEY_VALUE_GENERIC e = firstEntry();
+			if ( e == null ) throw new NoSuchElementException();
+			return e.getKey();
+		}
+	 
+		public KEY_GENERIC_CLASS lastKey() {
+			RB_TREE_MAP.Entry KEY_VALUE_GENERIC e = lastEntry();
+			if ( e == null ) throw new NoSuchElementException();
+			return e.getKey();
+		}
+#endif
+
+		/** An iterator for subranges.
+		 * 
+		 * <P>This class inherits from {@link TreeIterator}, but overrides the methods that
+		 * update the pointer after a {@link java.util.ListIterator#next()} or {@link java.util.ListIterator#previous()}. If a move
+		 * would take us out of the range of the submap, we simply overwrite the next or previous
+		 * entry with <code>null</code>.
+		 */
+		private class SubmapIterator extends TreeIterator {
+			SubmapIterator() {
+				next = firstEntry();
+			}
+
+			SubmapIterator( final KEY_GENERIC_TYPE k ) {
+				this();
+					 
+				if ( next != null ) {
+					if ( ! bottom && compare( k, next.key ) < 0 ) prev = null;
+					else if ( ! top && compare( k, ( prev = lastEntry() ).key ) >= 0 ) next = null;
+					else {
+						next = locateKey( k );
+								
+						if ( compare( next.key, k ) <= 0 ) {
+							prev = next;
+							next = next.next();
+						}
+						else prev = next.prev();
+					}
+				}			
+			}
+
+			void updatePrevious() {
+				prev = prev.prev();
+				if ( ! bottom && prev != null && RB_TREE_MAP.this.compare( prev.key, from ) < 0 ) prev = null;
+			}
+
+			void updateNext() {
+				next = next.next();
+				if ( ! top && next != null && RB_TREE_MAP.this.compare( next.key, to ) >= 0 ) next = null;
+			}
+		}
+
+		private class SubmapEntryIterator extends SubmapIterator implements ObjectListIterator<MAP.Entry KEY_VALUE_GENERIC> {
+			SubmapEntryIterator() {}
+
+			SubmapEntryIterator( final KEY_GENERIC_TYPE k ) {
+				super( k );
+			}
+
+			public MAP.Entry KEY_VALUE_GENERIC next() { return nextEntry(); }
+			public MAP.Entry KEY_VALUE_GENERIC previous() { return previousEntry(); }
+
+			public void set( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); }
+			public void add( MAP.Entry KEY_VALUE_GENERIC ok ) { throw new UnsupportedOperationException(); }
+		}
+
+
+		/** An iterator on a subrange of keys.
+		 *
+		 * <P>This class can iterate in both directions on a subrange of the
+		 * keys of a threaded tree. We simply override the {@link
+		 * java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods (and possibly their
+		 * type-specific counterparts) so that they return keys instead of
+		 * entries.
+		 */
+		private final class SubmapKeyIterator extends SubmapIterator implements KEY_LIST_ITERATOR KEY_GENERIC {
+			public SubmapKeyIterator() { super(); }
+			public SubmapKeyIterator( KEY_GENERIC_TYPE from ) { super( from ); }
+			public KEY_GENERIC_TYPE NEXT_KEY() { return nextEntry().key; }
+			public KEY_GENERIC_TYPE PREV_KEY() { return previousEntry().key; }
+			public void set( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+			public void add( KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+#if !#keyclass(Object)
+			public KEY_GENERIC_CLASS next() { return KEY2OBJ( nextEntry().key ); }
+			public KEY_GENERIC_CLASS previous() { return KEY2OBJ( previousEntry().key ); }
+			public void set( KEY_CLASS ok ) { throw new UnsupportedOperationException(); }
+			public void add( KEY_CLASS ok ) { throw new UnsupportedOperationException(); }
+#endif									
+		};
+		  
+		/** An iterator on a subrange of values.
+		 *
+		 * <P>This class can iterate in both directions on the values of a
+		 * subrange of the keys of a threaded tree. We simply override the
+		 * {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods (and possibly their
+		 * type-specific counterparts) so that they return values instead of
+		 * entries.  
+		 */
+		private final class SubmapValueIterator extends SubmapIterator implements VALUE_LIST_ITERATOR VALUE_GENERIC {
+			public VALUE_GENERIC_TYPE NEXT_VALUE() { return nextEntry().value; }
+			public VALUE_GENERIC_TYPE PREV_VALUE() { return previousEntry().value; }
+			public void set( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); }
+			public void add( VALUE_GENERIC_TYPE v ) { throw new UnsupportedOperationException(); }
+
+#if #values(primitive)
+			public VALUE_GENERIC_CLASS next() { return VALUE2OBJ( nextEntry().value ); }
+			public VALUE_GENERIC_CLASS previous() { return VALUE2OBJ( previousEntry().value ); }
+			public void set( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); }
+			public void add( VALUE_CLASS ok ) { throw new UnsupportedOperationException(); }
+#endif									
+		};
+
+
+	}
+	 
+
+	/** Returns a deep copy of this tree map.
+	 *
+	 * <P>This method performs a deep copy of this tree map; the data stored in the
+	 * map, however, is not cloned. Note that this makes a difference only for object keys.
+	 *
+	 * @return a deep copy of this tree map.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public RB_TREE_MAP KEY_VALUE_GENERIC clone() {
+		RB_TREE_MAP KEY_VALUE_GENERIC c;
+		try {
+			c = (RB_TREE_MAP KEY_VALUE_GENERIC)super.clone();
+		}
+		catch(CloneNotSupportedException cantHappen) {
+			throw new InternalError();
+		}
+
+		c.keys = null;
+		c.values = null;
+		c.entries = null;
+		c.allocatePaths();
+
+		if ( count != 0 ) {
+			// This apparently unfathomable code, too, is derived from GNU libavl.
+			Entry KEY_VALUE_GENERIC e, p, q, rp = new Entry KEY_VALUE_GENERIC(), rq = new Entry KEY_VALUE_GENERIC();
+
+			p = rp;
+			rp.left( tree );
+
+			q = rq;
+			rq.pred( null );
+
+			while( true ) {
+				if ( ! p.pred() ) {
+					e = p.left.clone();
+					e.pred( q.left );
+					e.succ( q );
+					q.left( e );
+
+					p = p.left;
+					q = q.left;
+				}
+				else {
+					while( p.succ() ) {
+						p = p.right;
+
+						if ( p == null ) {
+							q.right = null;
+							c.tree = rq.left;
+
+							c.firstEntry = c.tree;
+							while( c.firstEntry.left != null ) c.firstEntry = c.firstEntry.left;
+							c.lastEntry = c.tree;
+							while( c.lastEntry.right != null ) c.lastEntry = c.lastEntry.right;
+
+							return c;
+						}
+						q = q.right;
+					}
+								
+					p = p.right;
+					q = q.right;
+				}
+
+				if ( ! p.succ() ) {
+					e = p.right.clone();
+					e.succ( q.right );
+					e.pred( q );
+					q.right( e );
+				}
+			}
+		}
+
+		return c;
+	}
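+
+	/* A minimal sketch of the cloning semantics described above, assuming the generated
+	 * Object2IntRBTreeMap specialization (object keys, so key references are shared):
+	 *
+	 *   Object2IntRBTreeMap<String> m = new Object2IntRBTreeMap<String>();
+	 *   m.put( "a", 1 );
+	 *   Object2IntRBTreeMap<String> c = m.clone();
+	 *   c.put( "b", 2 );                                // structural changes do not affect m
+	 *   boolean shared = m.firstKey() == c.firstKey();  // true: the key object itself is not copied
+	 */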
+
+
+	private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
+		int n = count;
+		EntryIterator i = new EntryIterator();
+		Entry KEY_VALUE_GENERIC e;
+
+		s.defaultWriteObject();
+
+		while(n-- != 0) {
+			e = i.nextEntry();
+			s.WRITE_KEY( e.key );
+			s.WRITE_VALUE( e.value );
+		}
+	}
+
+
+	/** Reads the given number of entries from the input stream, returning the corresponding tree. 
+	 *
+	 * @param s the input stream.
+	 * @param n the (positive) number of entries to read.
+	 * @param pred the entry containing the key that precedes the first key in the tree.
+	 * @param succ the entry containing the key that follows the last key in the tree.
+	 */
+	@SuppressWarnings("unchecked")
+	private Entry KEY_VALUE_GENERIC readTree( final java.io.ObjectInputStream s, final int n, final Entry KEY_VALUE_GENERIC pred, final Entry KEY_VALUE_GENERIC succ ) throws java.io.IOException, ClassNotFoundException {
+		if ( n == 1 ) {
+			final Entry KEY_VALUE_GENERIC top = new Entry KEY_VALUE_GENERIC( KEY_GENERIC_CAST s.READ_KEY(), VALUE_GENERIC_CAST s.READ_VALUE() );
+			top.pred( pred );
+			top.succ( succ );
+			top.black( true );
+
+			return top;
+		}
+
+		if ( n == 2 ) {
+			/* We handle this case separately so that recursion will
+			   always be on nonempty subtrees. */
+			final Entry KEY_VALUE_GENERIC top = new Entry KEY_VALUE_GENERIC( KEY_GENERIC_CAST s.READ_KEY(), VALUE_GENERIC_CAST s.READ_VALUE() );
+			top.black( true );
+			top.right( new Entry KEY_VALUE_GENERIC( KEY_GENERIC_CAST s.READ_KEY(), VALUE_GENERIC_CAST s.READ_VALUE() ) );
+			top.right.pred( top );
+			top.pred( pred );
+			top.right.succ( succ );
+			
+			return top;
+		}
+
+		// The right subtree is the larger one.
+		final int rightN = n / 2, leftN = n - rightN - 1;
+
+		final Entry KEY_VALUE_GENERIC top = new Entry KEY_VALUE_GENERIC();
+
+		top.left( readTree( s, leftN, pred, top ) );
+		
+		top.key = KEY_GENERIC_CAST s.READ_KEY();
+		top.value = VALUE_GENERIC_CAST s.READ_VALUE();
+		top.black( true );
+
+		top.right( readTree( s, rightN, top, succ ) );
+
+		if ( n + 2 == ( ( n + 2 )  & -( n + 2 ) ) ) top.right.black( false ); // Quick test for determining whether n + 2 is a power of 2.
+
+		return top;
+	}
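+
+	/* A short note on the test above, with a worked example: in two's-complement arithmetic
+	 * x & -x isolates the lowest set bit of x, so x == ( x & -x ) holds exactly when the
+	 * positive integer x is a power of two. For n = 6, x = n + 2 = 8 = 0b1000 and 8 & -8 == 8
+	 * (a power of two); for n = 3, x = 5 = 0b101 and 5 & -5 == 1 != 5. An equivalent helper:
+	 *
+	 *   static boolean isPowerOfTwo( final int x ) { return x > 0 && ( x & -x ) == x; }
+	 */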
+
+	private void readObject( java.io.ObjectInputStream s ) throws java.io.IOException, ClassNotFoundException {
+		s.defaultReadObject();
+		/* The storedComparator is now correctly set, but we must restore
+		   the actualComparator on the fly. */
+		setActualComparator();
+		allocatePaths();
+
+		if ( count != 0 ) {
+			tree = readTree( s, count, null, null );
+			Entry KEY_VALUE_GENERIC e;
+
+			e = tree;
+			while( e.left() != null ) e = e.left();
+			firstEntry = e;
+
+			e = tree;
+			while( e.right() != null ) e = e.right();
+			lastEntry = e;
+		}
+
+		if ( ASSERTS ) checkTree( tree, 0, -1 );
+
+	}
+
+#ifdef ASSERTS_CODE
+	private void checkNodePath() {
+		for( int i = nodePath.length; i-- != 0; ) assert nodePath[ i ] == null : i;
+	}
+
+	private int checkTree( Entry KEY_VALUE_GENERIC e, int d, int D ) {
+		if ( e == null ) return 0;
+		if ( e.black() ) d++;
+		if ( e.left() != null ) D = checkTree( e.left(), d, D );
+		if ( e.right() != null ) D = checkTree( e.right(), d, D );
+		if ( e.left() == null && e.right() == null ) {
+			if ( D == -1 ) D = d;
+			else if ( D != d ) throw new AssertionError( "Mismatch between number of black nodes (" + D + " and " + d + ")" );
+		}
+		return D;
+	}
+#else
+	private void checkNodePath() {}
+	@SuppressWarnings("unused")
+	private int checkTree( Entry KEY_VALUE_GENERIC e, int d, int D ) { return 0; }
+#endif
+
+
+
+#ifdef TEST
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#else
+		return Integer.toBinaryString( r.nextInt() );
+#endif
+	}
+
+	private static VALUE_TYPE genValue() {
+#if #valueclass(Byte) || #valueclass(Short) || #valueclass(Character)
+		return (VALUE_TYPE)(r.nextInt());
+#elif #values(primitive)
+		return r.NEXT_VALUE();
+#elif !#valueclass(Reference) || #keyclass(Reference)
+		return Integer.toBinaryString( r.nextInt() );
+#else
+		return new java.io.Serializable() {};
+#endif
+	}
+
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition p = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, p ).toString();
+	}
+
+	private static void speedTest( int n, boolean comp ) {
+		int i, j;
+		RB_TREE_MAP m;
+		java.util.TreeMap t;
+		KEY_TYPE k[] = new KEY_TYPE[n];
+		KEY_TYPE nk[] = new KEY_TYPE[n];
+		VALUE_TYPE v[] = new VALUE_TYPE[n];
+		long ms;
+
+		for( i = 0; i < n; i++ ) {
+			k[i] = genKey();
+			nk[i] = genKey();
+			v[i] = genValue();
+		}
+
+		double totPut = 0, totYes = 0, totNo = 0, totIterFor = 0, totIterBack = 0, totRemYes = 0, d, dd;
+
+		if ( comp ) { for( j = 0; j < 20; j++ ) {
+
+			t = new java.util.TreeMap();
+
+
+			/* We first add all pairs to t. */
+			for( i = 0; i < n;  i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] ) );
+
+			/* Then we remove the first half and put it back. */
+			for( i = 0; i < n/2;  i++ ) t.remove( KEY2OBJ( k[i] ) );
+
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n/2;  i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] ) );
+			d = System.currentTimeMillis() - ms;
+
+			/* Then we remove the other half and put it back again. */
+			ms = System.currentTimeMillis();
+			for( i = n/2; i < n;  i++ ) t.remove( KEY2OBJ( k[i] ) );
+			dd = System.currentTimeMillis() - ms ;
+
+			ms = System.currentTimeMillis();
+			for( i = n/2; i < n;  i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] )  );
+			d += System.currentTimeMillis() - ms;
+			if ( j > 2 ) totPut += n/d; 				
+			System.out.print("Add: " + format( n/d ) +" K/s " );
+
+			/* Then we remove again the first half. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n/2;  i++ ) t.remove( KEY2OBJ( k[i] ) );
+			dd += System.currentTimeMillis() - ms ;
+			if ( j > 2 ) totRemYes += n/dd; 				
+			System.out.print("RemYes: " + format( n/dd ) +" K/s " );
+
+			/* And then we put it back. */
+			for( i = 0; i < n/2;  i++ ) t.put( KEY2OBJ( k[i] ), VALUE2OBJ( v[i] )  );
+
+			/* We check for pairs in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.containsKey( KEY2OBJ( k[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totYes += d; 				
+			System.out.print("Yes: " + format( d ) +" K/s " );
+
+			/* We check for pairs not in t. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) t.containsKey( KEY2OBJ( nk[i] ) );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totNo += d; 				
+			System.out.print("No: " + format( d ) +" K/s " );
+
+			/* We iterate on t. */
+			ms = System.currentTimeMillis();
+			for( Iterator it = t.entrySet().iterator(); it.hasNext(); it.next() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIterFor += d; 				
+			System.out.print("IterFor: " + format( d ) +" K/s " );
+				
+			System.out.println();
+		}
+
+		System.out.println();
+		System.out.println( "java.util Put: " + format( totPut/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) )+ " K/s IterFor: " + format( totIterFor/(j-3) )  + " K/s"  );
+
+		System.out.println();
+
+		t = null;
+		totPut = totYes = totNo = totIterFor = totIterBack = totRemYes = 0;
+
+		}
+
+		for( j = 0; j < 20; j++ ) {
+
+			m = new RB_TREE_MAP();
+
+
+			/* We first add all pairs to m. */
+			for( i = 0; i < n;  i++ ) m.put( k[i], v[i] );
+
+			/* Then we remove the first half and put it back. */
+			for( i = 0; i < n/2;  i++ ) m.remove( k[i] );
+
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n/2;  i++ ) m.put( k[i], v[i] );
+			d = System.currentTimeMillis() - ms;
+
+			/* Then we remove the other half and put it back again. */
+			ms = System.currentTimeMillis();
+			for( i = n/2; i < n;  i++ ) m.remove( k[i] );
+			dd = System.currentTimeMillis() - ms ;
+
+			ms = System.currentTimeMillis();
+			for( i = n/2; i < n;  i++ ) m.put( k[i], v[i]  );
+			d += System.currentTimeMillis() - ms;
+			if ( j > 2 ) totPut += n/d; 				
+			System.out.print("Add: " + format( n/d ) +" K/s " );
+
+			/* Then we remove again the first half. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n/2;  i++ ) m.remove( k[i] );
+			dd += System.currentTimeMillis() - ms ;
+			if ( j > 2 ) totRemYes += n/dd; 				
+			System.out.print("RemYes: " + format( n/dd ) +" K/s " );
+
+			/* And then we put it back. */
+			for( i = 0; i < n/2;  i++ ) m.put( k[i], v[i]  );
+
+			/* We check for pairs in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.containsKey( k[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totYes += d; 				
+			System.out.print("Yes: " + format( d ) +" K/s " );
+
+			/* We check for pairs not in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.containsKey( nk[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totNo += d; 				
+			System.out.print("No: " + format( d ) +" K/s " );
+
+			/* We iterate on m. */
+			java.util.ListIterator it = (java.util.ListIterator)m.entrySet().iterator();
+			ms = System.currentTimeMillis();
+			for( ; it.hasNext(); it.next() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIterFor += d; 				
+			System.out.print("IterFor: " + format( d ) +" K/s " );
+				
+			/* We iterate back on m. */
+			ms = System.currentTimeMillis();
+			for( ; it.hasPrevious(); it.previous() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIterBack += d; 				
+			System.out.print("IterBack: " + format( d ) +" K/s " );
+				
+			System.out.println();
+		}
+
+
+		System.out.println();
+		System.out.println( "fastutil  Put: " + format( totPut/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s IterFor: " + format( totIterFor/(j-3) ) + " K/s IterBack: " + format( totIterBack/(j-3) ) + "K/s"  );
+
+		System.out.println();
+
+	}
+
+
+	private static boolean valEquals(Object o1, Object o2) {
+		return o1 == null ? o2 == null : o1.equals(o2);
+	}
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	private static Object[] k, v, nk;
+	private static KEY_TYPE kt[];
+	private static KEY_TYPE nkt[];
+	private static VALUE_TYPE vt[];
+	private static RB_TREE_MAP topMap;
+
+	protected static void testMaps( SORTED_MAP m, SortedMap t, int n, int level ) {
+		long ms;
+		boolean mThrowsIllegal, tThrowsIllegal, mThrowsNoElement, tThrowsNoElement;
+		Object rt = null, rm = null;
+
+		if ( level > 4 ) return;
+				
+
+		/* Now we check that both maps agree on first/last keys. */
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.firstKey();
+		}
+		catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.firstKey();
+		}
+		catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): firstKey() divergence at start in NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+		if ( ! mThrowsNoElement ) ensure( t.firstKey().equals( m.firstKey() ), "Error (" + level + ", " + seed + "): m and t differ at start on their first key (" + m.firstKey() + ", " + t.firstKey() +")" );
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.lastKey();
+		}
+		catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.lastKey();
+		}
+		catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): lastKey() divergence at start in NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+
+
+		if ( ! mThrowsNoElement ) ensure( t.lastKey().equals( m.lastKey() ), "Error (" + level + ", " + seed + "): m and t differ at start on their last key (" + m.lastKey() + ", " + t.lastKey() +")");
+
+
+		/* Now we check that m and t are equal. */
+		if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t);
+
+		ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" );
+		ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" );
+
+
+
+		/* Now we check that m actually holds that data. */
+		for(Iterator i=t.entrySet().iterator(); i.hasNext();  ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			ensure( valEquals(e.getValue(), m.get(e.getKey())), "Error (" + level + ", " + seed + "): m and t differ on an entry ("+e+") after insertion (iterating on t)" );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for(Iterator i=m.entrySet().iterator(); i.hasNext();  ) {
+			Entry e = (Entry)i.next();
+			ensure( valEquals(e.getValue(), t.get(e.getKey())), "Error (" + level + ", " + seed + "): m and t differ on an entry ("+e+") after insertion (iterating on m)" );
+		}
+
+		/* Now we check that m actually holds the same keys. */
+		for(Iterator i=t.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( m.containsKey(o), "Error (" + level + ", " + seed + "): m and t differ on a key ("+o+") after insertion (iterating on t)" );
+			ensure( m.keySet().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a key ("+o+", in keySet()) after insertion (iterating on t)" );
+		}
+
+		/* Now we check that m actually holds the same keys, but iterating on m. */
+		for(Iterator i=m.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( t.containsKey(o), "Error (" + level + ", " + seed + "): m and t differ on a key after insertion (iterating on m)" );
+			ensure( t.keySet().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a key (in keySet()) after insertion (iterating on m)" );
+		}
+
+
+		/* Now we check that m actually holds the same values. */
+		for(Iterator i=t.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( m.containsValue(o), "Error (" + level + ", " + seed + "): m and t differ on a value after insertion (iterating on t)" );
+			ensure( m.values().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a value (in values()) after insertion (iterating on t)" );
+		}
+
+		/* Now we check that m actually holds the same values, but iterating on m. */
+		for(Iterator i=m.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( t.containsValue(o), "Error (" + level + ", " + seed + "): m and t differ on a value after insertion (iterating on m)");
+			ensure( t.values().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a value (in values()) after insertion (iterating on m)");
+		}
+
+		/* Now we check that inquiries about random data give the same answer in m and t. For
+		   m we use the polymorphic method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+				
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+
+			try {
+				m.containsKey(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+
+			try {
+				t.containsKey(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): containsKey() divergence in NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): containsKey() divergence in IllegalArgumentException (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			if ( !mThrowsNoElement && !mThrowsIllegal ) {
+				ensure( m.containsKey(KEY2OBJ(T)) == t.containsKey(KEY2OBJ(T)), "Error (" + level + ", " + seed + "): divergence in keys between t and m (polymorphic method)" );
+					 
+#if #keyclass(Object) && ! ( #values(reference) )
+				if ((m.GET_VALUE(T) != VALUE_NULL) != ((t.get(KEY2OBJ(T)) == null ? VALUE_NULL : VALUE_OBJ2TYPE(t.get(KEY2OBJ(T)))) != VALUE_NULL) || 
+					t.get(KEY2OBJ(T)) != null && 
+					! VALUE2OBJ(m.GET_VALUE(T)).equals(t.get(KEY2OBJ(T)))) 
+#else
+					if ((m.get(T) != VALUE_NULL) != ((t.get(KEY2OBJ(T)) == null ? VALUE_NULL : VALUE_OBJ2TYPE(t.get(KEY2OBJ(T)))) != VALUE_NULL) || 
+						t.get(KEY2OBJ(T)) != null && 
+						! m.get(KEY2OBJ(T)).equals(t.get(KEY2OBJ(T)))) 
+#endif
+						{
+							System.out.println("Error (" + level + ", " + seed + "): divergence between t and m (polymorphic method)");
+							System.exit( 1 );
+						}
+			}
+		}
+
+		/* Again, we check that inquiries about random data give the same answer in m and t, but
+		   for m we use the standard method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+
+			try {
+				m.get(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+
+			try {
+				t.get(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): get() divergence in NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): get() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			if ( !mThrowsNoElement && !mThrowsIllegal ) ensure( valEquals(m.get(KEY2OBJ(T)), t.get(KEY2OBJ(T))), "Error (" + level + ", " + seed + "): divergence between t and m (standard method)" );
+		}
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+			VALUE_TYPE U = genValue();
+
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+
+			try {
+				rm = m.put(KEY2OBJ(T), VALUE2OBJ(U));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+
+			try {
+				rt = t.put(KEY2OBJ(T), VALUE2OBJ(U));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): put() divergence in NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): put() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			if ( !mThrowsNoElement && !mThrowsIllegal ) ensure( valEquals( rm, rt ), "Error (" + level + ", " + seed + "): divergence in put() between t and m (" + rt + ", " + rm + ")" );
+
+			T = genKey();
+
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+
+			try {
+				rm = m.remove(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+
+			try {
+				rt = t.remove(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): remove() divergence in NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): remove() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			if ( !mThrowsNoElement && !mThrowsIllegal ) ensure( valEquals( rm, rt ), "Error (" + level + ", " + seed + "): divergence in remove() between t and m (" + rt + ", " + rm + ")" );
+		}
+
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after removal" );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after removal" );
+
+		/* Now we check that m actually holds the same data. */
+		  
+		for(Iterator i=t.entrySet().iterator(); i.hasNext();  ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			ensure( valEquals(e.getValue(), m.get(e.getKey())), "Error (" + level + ", " + seed + "): m and t differ on an entry ("+e+") after removal (iterating on t)");
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(Iterator i=m.entrySet().iterator(); i.hasNext();  ) {
+			Entry e = (Entry)i.next();
+			ensure( valEquals(e.getValue(), t.get(e.getKey())), "Error (" + level + ", " + seed + "): m and t differ on an entry ("+e+") after removal (iterating on m)" );
+		}
+
+		/* Now we check that m actually holds the same keys. */
+		  
+		for(Iterator i=t.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( m.containsKey(o), "Error (" + level + ", " + seed + "): m and t differ on a key ("+o+") after removal (iterating on t)");
+			ensure( m.keySet().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a key ("+o+", in keySet()) after removal (iterating on t)");
+		}
+
+		/* Now we check that m actually holds the same keys, but iterating on m. */
+		  
+		for(Iterator i=m.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( t.containsKey(o), "Error (" + level + ", " + seed + "): m and t differ on a key after removal (iterating on m)");
+			ensure( t.keySet().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a key (in keySet()) after removal (iterating on m)");
+		}
+
+
+		/* Now we check that m actually holds the same values. */
+		  
+		for(Iterator i=t.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( m.containsValue(o), "Error (" + level + ", " + seed + "): m and t differ on a value after removal (iterating on t)" );
+			ensure( m.values().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a value (in values()) after removal (iterating on t)");
+		}
+
+		/* Now we check that m actually holds the same values, but iterating on m. */
+		  
+		for(Iterator i=m.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( t.containsValue(o), "Error (" + level + ", " + seed + "): m and t differ on a value after removal (iterating on m)");
+			ensure( t.values().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a value (in values()) after removal (iterating on m)");
+		}
+
+		/* Now we check that both maps agree on first/last keys. */
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.firstKey();
+		}
+		catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.firstKey();
+		}
+		catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): firstKey() divergence in NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+		if ( ! mThrowsNoElement ) ensure( t.firstKey().equals( m.firstKey() ), "Error (" + level + ", " + seed + "): m and t differ on their first key (" + m.firstKey() + ", " + t.firstKey() +")" );
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.lastKey();
+		}
+		catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.lastKey();
+		}
+		catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): lastKey() divergence in NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+
+		if ( ! mThrowsNoElement ) ensure( t.lastKey().equals( m.lastKey() ), "Error (" + level + ", " + seed + "): m and t differ on their last key (" + m.lastKey() + ", " + t.lastKey() +")");
+
+		/* Now we check cloning. */
+
+		if ( level == 0 ) {
+			ensure( m.equals( ((RB_TREE_MAP)m).clone() ), "Error (" + level + ", " + seed + "): m does not equal m.clone()" );
+			ensure( ((RB_TREE_MAP)m).clone().equals( m ), "Error (" + level + ", " + seed + "): m.clone() does not equal m" );
+			m = (RB_TREE_MAP)((RB_TREE_MAP)m).clone();
+		}
+
+		int h = m.hashCode();
+
+
+		/* Now we save and read m. */
+
+		SORTED_MAP m2 = null;
+		  
+		try {
+			java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test");
+			java.io.OutputStream os = new java.io.FileOutputStream(ff);
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os);
+				
+			oos.writeObject(m);
+			oos.close();
+				
+			java.io.InputStream is = new java.io.FileInputStream(ff);
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is);
+				
+			m2 = (SORTED_MAP)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch(Exception e) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+
+#if !#valueclass(Reference)
+		ensure( m2.hashCode() == h, "Error (" + level + ", " + seed + "): hashCode() changed after save/read" );
+		  
+		/* Now we check that m2 actually holds that data. */
+		  
+		ensure( m2.equals(t), "Error (" + level + ", " + seed + "): ! m2.equals( t ) after save/read" );
+		ensure( t.equals(m2), "Error (" + level + ", " + seed + "): ! t.equals( m2 ) after save/read" );
+		/* Now we remove everything from m2 and check that it is empty. */
+#else
+		m2.clear();
+		m2.putAll( m );
+#endif
+	  
+		for(Iterator i=t.keySet().iterator(); i.hasNext(); ) m2.remove(i.next());
+		  
+		ensure( m2.isEmpty(), "Error (" + level + ", " + seed + "): m2 is not empty (as it should be)" );
+
+		/* Now we play with iterators. */
+		  
+		{
+			java.util.ListIterator i, j;
+			Map.Entry E, F;
+			Object J;
+			i = (java.util.ListIterator)m.entrySet().iterator(); 
+			j = new java.util.LinkedList( t.entrySet() ).listIterator(); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext()" );
+				ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + level + ", " + seed + "): divergence in hasPrevious()" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					ensure( (E=(Entry)i.next()).getKey().equals( J = (F=(Map.Entry)j.next()).getKey() ), "Error (" + level + ", " + seed + "): divergence in next()" );
+
+					if ( r.nextFloat() < 0.3 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+					else if ( r.nextFloat() < 0.3 ) {
+						Object U = VALUE2OBJ(genValue());
+						E.setValue( U );
+                        t.put( F.getKey(), U );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					ensure( (E=(Entry)i.previous()).getKey().equals( J = (F=(Map.Entry)j.previous()).getKey() ), "Error (" + level + ", " + seed + "): divergence in previous()" );
+
+					if ( r.nextFloat() < 0.3 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+					else if ( r.nextFloat() < 0.3 ) {
+						Object U = VALUE2OBJ(genValue());
+						E.setValue( U );
+                        t.put( F.getKey(), U );
+					}
+				}
+
+				ensure( i.nextIndex() == j.nextIndex(), "Error (" + level + ", " + seed + "): divergence in nextIndex()" );
+				ensure( i.previousIndex() == j.previousIndex(), "Error (" + level + ", " + seed + "): divergence in previousIndex()" );
+
+			}
+		}
+
+		{
+			boolean badPrevious = false;
+			Object previous = null;
+			it.unimi.dsi.fastutil.BidirectionalIterator i;
+			java.util.ListIterator j;
+			Object I, J;
+			KEY_TYPE from = genKey();
+			j = new java.util.LinkedList( t.keySet() ).listIterator(); 
+			while( j.hasNext() ) {
+				Object k = j.next();
+				if ( ((Comparable)k).compareTo( KEY2OBJ( from ) ) > 0 ) {
+					badPrevious = true;
+					j.previous();
+					break;
+				}
+				previous = k;
+			}
+
+			i = (it.unimi.dsi.fastutil.BidirectionalIterator)((SORTED_SET)m.keySet()).iterator( from ); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" );
+				ensure( i.hasPrevious() == j.hasPrevious() || badPrevious && ( i.hasPrevious() == ( previous != null ) ), "Error (" + level + ", " + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" + badPrevious );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					ensure( ( I = i.next() ).equals( J = j.next() ), "Error (" + level + ", " + seed + "): divergence in next() (" + I + ", " + J + ", iterator with starting point " + from + ")" );
+					//System.err.println("Done next " + I + " " + J + "  " + badPrevious);
+
+					badPrevious = false;
+
+					if ( r.nextFloat() < 0.5 ) {
+						//System.err.println("Removing in next");
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+				else if ( !badPrevious && r.nextFloat() < .2 && i.hasPrevious() ) {
+					ensure( ( I = i.previous() ).equals( J = j.previous() ), "Error (" + level + ", " + seed + "): divergence in previous() (" + I + ", " + J + ", iterator with starting point " + from + ")" );
+
+					if ( r.nextFloat() < 0.5 ) {
+						//System.err.println("Removing in prev");
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+			}
+
+		}
+
+		/* Now we check that m actually holds that data. */
+		  
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after iteration" );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after iteration" );
+
+		/* Now we select a pair of keys and create a submap. */
+
+		if ( ! m.isEmpty() ) {
+			java.util.ListIterator i;
+			Object start = m.firstKey(), end = m.firstKey();
+			for( i = (java.util.ListIterator)m.keySet().iterator(); i.hasNext() && r.nextFloat() < .3; start = end = i.next() );
+			for( ; i.hasNext() && r.nextFloat() < .95; end = i.next() );
+				
+			//System.err.println("Checking subMap from " + start + " to " + end + " (level=" + (level+1) + ")..." );
+			testMaps( (SORTED_MAP)m.subMap( (KEY_CLASS) start, (KEY_CLASS)end ), t.subMap( start, end ), n, level + 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after subMap" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after subMap" );
+
+			//System.err.println("Checking headMap to " + end + " (level=" + (level+1) + ")..." );
+			testMaps( (SORTED_MAP)m.headMap( (KEY_CLASS)end ), t.headMap( end ), n, level + 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after headMap" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after headMap" );
+
+			//System.err.println("Checking tailMap from " + start + " (level=" + (level+1) + ")..." );
+			testMaps( (SORTED_MAP)m.tailMap( (KEY_CLASS)start ), t.tailMap( start ), n, level + 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after tailMap" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after tailMap" );
+		}
+		  
+
+	}
+
+
+	private static void test( int n ) {
+		RB_TREE_MAP m = new RB_TREE_MAP();
+		SortedMap t = new java.util.TreeMap();
+		topMap = m;
+		k = new Object[n];
+		v = new Object[n];
+		nk = new Object[n];
+		kt = new KEY_TYPE[n];
+		nkt = new KEY_TYPE[n];
+		vt = new VALUE_TYPE[n];
+
+		for( int i = 0; i < n; i++ ) {
+#if #keyclass(Object)
+			k[i] = kt[i] = genKey();
+			nk[i] = nkt[i] = genKey();
+#else
+			k[i] = new KEY_CLASS( kt[i] = genKey() );
+			nk[i] = new KEY_CLASS( nkt[i] = genKey() );
+#endif
+#if #values(reference)
+			v[i] = vt[i] = genValue();
+#else
+			v[i] = new VALUE_CLASS( vt[i] = genValue() );
+#endif
+		}
+		  
+		/* We add pairs to t. */
+		for( int i = 0; i < n;  i++ ) t.put( k[i], v[i] );
+		  
+		/* We add to m the same data */
+		m.putAll(t);
+
+		testMaps( m, t, n, 0 );
+
+		System.out.println("Test OK");
+		return;
+	}
+
+
+	public static void main( String args[] ) {
+		int n  = Integer.parseInt(args[1]);
+		if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) );
+		  
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+
+#endif
+
+
+}
diff --git a/drv/RBTreeSet.drv b/drv/RBTreeSet.drv
new file mode 100644
index 0000000..59c123a
--- /dev/null
+++ b/drv/RBTreeSet.drv
@@ -0,0 +1,2137 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.SortedSet;
+import java.util.NoSuchElementException;
+
+/** A type-specific red-black tree set with a fast, small-footprint implementation.
+ *
+ * <P>The iterators provided by this class are type-specific {@link
+ * it.unimi.dsi.fastutil.BidirectionalIterator bidirectional iterators}.
+ * Moreover, the iterator returned by <code>iterator()</code> can be safely cast
+ * to a type-specific {@linkplain java.util.ListIterator list iterator}.
+ */
+
+public class RB_TREE_SET KEY_GENERIC extends ABSTRACT_SORTED_SET KEY_GENERIC implements java.io.Serializable, Cloneable, SORTED_SET KEY_GENERIC {
+
+	/** A reference to the root entry. */
+	protected transient Entry KEY_GENERIC tree;
+
+	/** Number of elements in this set. */
+	protected int count;
+
+	/** The entry of the first element of this set. */
+	protected transient Entry KEY_GENERIC firstEntry;
+
+	/** The entry of the last element of this set. */
+	protected transient Entry KEY_GENERIC lastEntry;
+
+	/** This set's comparator, as provided in the constructor. */
+	protected Comparator<? super KEY_GENERIC_CLASS> storedComparator;
+
+	/** This set's actual comparator; it may differ from {@link #storedComparator} because it is
+		always a type-specific comparator, so it could be derived from the former by wrapping. */
+	protected transient KEY_COMPARATOR KEY_SUPER_GENERIC actualComparator;
+
+	private static final long serialVersionUID = -7046029254386353130L;
+
+	private static final boolean ASSERTS = ASSERTS_VALUE;
+
+	{
+		allocatePaths();
+	}
+
+	/** Creates a new empty tree set. 
+	 */
+
+	public RB_TREE_SET() {
+		tree = null;
+		count = 0;
+	}
+
+	/** Generates the comparator that will be actually used.
+	 *
+	 * <P>When a specific {@link Comparator} is specified and stored in {@link
+	 * #storedComparator}, we must check whether it is type-specific.  If it is
+	 * so, we can used directly, and we store it in {@link #actualComparator}. Otherwise,
+	 * we generate on-the-fly an anonymous class that wraps the non-specific {@link Comparator}
+	 * and makes it into a type-specific one.
+	 */
+	@SuppressWarnings("unchecked")
+	private void setActualComparator() {
+#if #keyclass(Object)
+		actualComparator = storedComparator;
+#else
+		/* If the provided comparator is already type-specific, we use it. Otherwise,
+		   we use a wrapper anonymous class to fake that it is type-specific. */
+		if ( storedComparator == null || storedComparator instanceof KEY_COMPARATOR ) actualComparator = (KEY_COMPARATOR)storedComparator;
+		else actualComparator =	new KEY_COMPARATOR KEY_GENERIC() {
+				public int compare( KEY_GENERIC_TYPE k1, KEY_GENERIC_TYPE k2 ) {
+					return storedComparator.compare( KEY2OBJ( k1 ), KEY2OBJ( k2 ) );
+				}
+				public int compare( KEY_CLASS ok1, KEY_CLASS ok2 ) {
+					return storedComparator.compare( ok1, ok2 );
+				}
+			};
+#endif
+	}
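+
+	/* A minimal usage sketch (illustrative only, not part of the generated sources): assuming
+	   the int instantiation IntRBTreeSet, a plain java.util.Comparator is accepted but will be
+	   wrapped on the fly by setActualComparator(), whereas a type-specific IntComparator is
+	   used directly and avoids boxing.
+
+	   java.util.Comparator<Integer> reverse = java.util.Collections.reverseOrder();
+	   IntRBTreeSet s = new IntRBTreeSet( reverse ); // actualComparator wraps reverse
+	   s.add( 1 ); s.add( 2 );
+	   int first = s.firstInt(); // 2, since the set is ordered by the reversed comparator
+	*/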
+	 
+
+	/** Creates a new empty tree set with the given comparator.
+	 *
+	 * @param c a {@link Comparator} (even better, a type-specific comparator).
+	 */
+
+	public RB_TREE_SET( final Comparator<? super KEY_GENERIC_CLASS> c ) {
+		this();
+		storedComparator = c;
+		setActualComparator();
+	}
+
+
+	/** Creates a new tree set copying a given collection.
+	 *
+	 * @param c a collection to be copied into the new tree set. 
+	 */
+	 
+	public RB_TREE_SET( final Collection<? extends KEY_GENERIC_CLASS> c ) {
+		this();
+		addAll( c );
+	}
+
+	/** Creates a new tree set copying a given sorted set (and its {@link Comparator}).
+	 *
+	 * @param s a {@link SortedSet} to be copied into the new tree set. 
+	 */
+	 
+	public RB_TREE_SET( final SortedSet<KEY_GENERIC_CLASS> s ) {
+		this( s.comparator() );
+		addAll( s );
+	}
+
+	/** Creates a new tree set copying a given type-specific collection.
+	 *
+	 * @param c a type-specific collection to be copied into the new tree set. 
+	 */
+	 
+	public RB_TREE_SET( final COLLECTION KEY_EXTENDS_GENERIC c ) {
+		this();
+		addAll( c );
+	}
+
+	/** Creates a new tree set copying a given type-specific sorted set (and its {@link Comparator}).
+	 *
+	 * @param s a type-specific sorted set to be copied into the new tree set. 
+	 */
+
+	public RB_TREE_SET( final SORTED_SET KEY_GENERIC s ) {
+		this( s.comparator() );
+		addAll( s );
+	}
+
+
+	/** Creates a new tree set using elements provided by a type-specific iterator.
+	 *
+	 * @param i a type-specific iterator whose elements will fill the set.
+	 */
+	 
+	public RB_TREE_SET( final KEY_ITERATOR KEY_EXTENDS_GENERIC i ) {
+		while( i.hasNext() ) add( i.NEXT_KEY() );
+	}
+
+
+	/** Creates a new tree set using elements provided by an iterator.
+	 *
+	 * @param i an iterator whose elements will fill the set.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public RB_TREE_SET( final Iterator<? extends KEY_GENERIC_CLASS> i ) {
+		this( ITERATORS.AS_KEY_ITERATOR( i ) );
+	}
+
+
+	/** Creates a new tree set and fills it with the elements of a given array using a given {@link Comparator}.
+	 *
+	 * @param a an array whose elements will be used to fill the set.
+	 * @param offset the first element to use.
+	 * @param length the number of elements to use.
+	 * @param c a {@link Comparator} (even better, a type-specific comparator).
+	 */
+	 
+	public RB_TREE_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length, final Comparator<? super KEY_GENERIC_CLASS> c ) {
+		this( c );
+		ARRAYS.ensureOffsetLength( a, offset, length );
+		for( int i = 0; i < length; i++ ) add( a[ offset + i ] );
+	}
+
+
+	/** Creates a new tree set and fills it with the elements of a given array.
+	 *
+	 * @param a an array whose elements will be used to fill the set.
+	 * @param offset the first element to use.
+	 * @param length the number of elements to use.
+	 */
+	 
+	public RB_TREE_SET( final KEY_GENERIC_TYPE[] a, final int offset, final int length ) {
+		this( a, offset, length, null );
+	}
+
+
+	/** Creates a new tree set copying the elements of an array.
+	 *
+	 * @param a an array to be copied into the new tree set. 
+	 */
+	 
+	public RB_TREE_SET( final KEY_GENERIC_TYPE[] a ) {
+		this();
+		int i = a.length;
+		while( i-- != 0 ) add( a[ i ] );
+	}
+
+
+	/** Creates a new tree set copying the elements of an array using a given {@link Comparator}.
+	 *
+	 * @param a an array to be copied into the new tree set. 
+	 * @param c a {@link Comparator} (even better, a type-specific comparator).
+	 */
+	 
+	public RB_TREE_SET( final KEY_GENERIC_TYPE[] a, final Comparator<? super KEY_GENERIC_CLASS> c ) {
+		this( c );
+		int i = a.length;
+		while( i-- != 0 ) add( a[ i ] );
+	}
+
+
+
+
+	/*
+	 * The following methods implement some basic building blocks used by
+	 * all accessors.  They are (and should be maintained) identical to those used in RBTreeMap.drv.
+	 *
+	 * The add()/remove() code is derived from Ben Pfaff's GNU libavl
+	 * (http://www.msu.edu/~pfaffben/avl/). If you want to understand what's
+	 * going on, you should have a look at the literate code contained therein
+	 * first.  
+	 */
+
+
+	/** Compares two keys in the right way. 
+	 *
+	 * <P>This method uses the {@link #actualComparator} if it is non-<code>null</code>.
+	 * Otherwise, it resorts to primitive type comparisons or to {@link Comparable#compareTo(Object) compareTo()}.
+	 *
+	 * @param k1 the first key.
+	 * @param k2 the second key.
+	 * @return a number smaller than, equal to or greater than 0, as usual
+	 * (i.e., when k1 < k2, k1 = k2 or k1 > k2, respectively).
+	 */
+	 
+	@SuppressWarnings("unchecked")
+	final int compare( final KEY_GENERIC_TYPE k1, final KEY_GENERIC_TYPE k2 ) {
+		return actualComparator == null ? KEY_CMP( k1, k2 ) : actualComparator.compare( k1, k2 );
+	}
+
+
+
+	/** Returns the entry corresponding to the given key, if it is in the tree; <code>null</code>, otherwise.
+	 *
+	 * @param k the key to search for.
+	 * @return the corresponding entry, or <code>null</code> if no entry with the given key exists.
+	 */
+
+	private Entry KEY_GENERIC findKey( final KEY_GENERIC_TYPE k ) {
+		Entry KEY_GENERIC e = tree;
+		int cmp;
+
+		while ( e != null && ( cmp = compare( k, e.key ) ) != 0 ) 
+			e = cmp < 0 ? e.left() : e.right();
+		 
+		return e;
+	}
+
+	/** Locates a key.
+	 *
+	 * @param k a key.
+	 * @return the last entry on a search for the given key; this will be
+	 * the entry containing the given key, if it is present; otherwise, it will be the entry containing either the smallest greater key or the greatest smaller key.
+	 */
+
+	final Entry KEY_GENERIC locateKey( final KEY_GENERIC_TYPE k ) {
+		Entry KEY_GENERIC e = tree, last = tree;
+		int cmp = 0;
+		  
+		while ( e != null && ( cmp = compare( k, e.key ) ) != 0 ) {
+			last = e;
+			e = cmp < 0 ? e.left() : e.right();
+		}
+		  
+		return cmp == 0 ? e : last;
+	}
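+
+	/* Illustrative note (a sketch, assuming int keys): on the set { 10, 20, 30 },
+	   locateKey( 20 ) returns the entry holding 20, while locateKey( 25 ) returns the entry
+	   holding either 20 or 30, depending on the current shape of the tree; callers such as
+	   the iterators and the subset views compensate by comparing the returned key with the
+	   key they were looking for.
+	*/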
+
+	/** This vector remembers the path and the direction followed during the
+	 *  current insertion. It suffices for about 2<sup>32</sup> entries. */
+	private transient boolean dirPath[];
+	private transient Entry KEY_GENERIC nodePath[];
+
+	@SuppressWarnings("unchecked")
+	private void allocatePaths() {
+		dirPath = new boolean[ 64 ];
+#if #keys(reference)
+		nodePath = new Entry[ 64 ];
+#else
+		nodePath = new Entry[ 64 ];
+#endif
+	}
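+
+	/* Why 64 slots suffice (a sketch of the bound): a red-black tree on n keys has height
+	   at most 2 log2( n + 1 ), so for n of the order of 2^32 the path followed by an
+	   insertion or removal never visits more than about 64 nodes, which matches the size
+	   of the arrays allocated above.
+	*/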
+
+
+	public boolean add( final KEY_GENERIC_TYPE k ) {
+		int maxDepth = 0;
+
+		if ( tree == null ) { // The case of the empty tree is treated separately.
+			count++;
+			tree = lastEntry = firstEntry = new Entry KEY_GENERIC( k );
+		}
+		else {
+			Entry KEY_GENERIC p = tree, e;
+			int cmp, i = 0;
+
+			while( true ) {
+				if ( ( cmp = compare( k, p.key ) ) == 0 ) {
+					// We clean up the node path, or we could have stale references later.
+					while( i-- != 0 ) nodePath[ i ] = null;
+					return false;
+				}
+					 
+				nodePath[ i ] = p;
+					 
+				if ( dirPath[ i++ ] = cmp > 0 ) {
+					if ( p.succ() ) {
+						count++;
+						e = new Entry KEY_GENERIC( k );
+								
+						if ( p.right == null ) lastEntry = e;
+								
+						e.left = p;
+						e.right = p.right;
+								
+						p.right( e );
+								
+						break;
+					}
+
+					p = p.right;
+				}
+				else {
+					if ( p.pred() ) {
+						count++;
+						e = new Entry KEY_GENERIC( k );
+								
+						if ( p.left == null ) firstEntry = e;
+								
+						e.right = p;
+						e.left = p.left;
+								
+						p.left( e );
+
+						break;
+					}
+
+					p = p.left;
+				}
+			}
+
+			maxDepth = i--;
+
+			while( i > 0 && ! nodePath[ i ].black() ) {
+				if ( ! dirPath[ i - 1 ] ) {
+					Entry KEY_GENERIC y = nodePath[ i - 1 ].right;
+
+					if ( ! nodePath[ i - 1 ].succ() && ! y.black() ) {
+						nodePath[ i ].black( true );
+						y.black( true );
+						nodePath[ i - 1 ].black( false );
+						i -= 2;
+					}
+					else {
+						Entry KEY_GENERIC x;
+
+						if ( ! dirPath[ i ] ) y = nodePath[ i ];
+						else {
+							x = nodePath[ i ];
+							y = x.right;
+							x.right = y.left;
+							y.left = x;
+							nodePath[ i - 1 ].left = y;
+
+							if ( y.pred() ) {
+								y.pred( false );
+								x.succ( y );
+							}
+						}
+								
+						x = nodePath[ i - 1 ];
+						x.black( false );
+						y.black( true );
+								
+						x.left = y.right;
+						y.right = x;
+						if ( i < 2 ) tree = y;
+						else {
+							if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = y;
+							else nodePath[ i - 2 ].left = y;
+						}
+
+						if ( y.succ() ) {
+							y.succ( false );
+							x.pred( y );
+						}
+						break;
+					}
+				}
+				else {
+					Entry KEY_GENERIC y = nodePath[ i - 1 ].left;
+						  
+					if ( ! nodePath[ i - 1 ].pred() && ! y.black() ) {
+						nodePath[ i ].black( true );
+						y.black( true );
+						nodePath[ i - 1 ].black( false );
+						i -= 2;
+					}
+					else {
+						Entry KEY_GENERIC x;
+
+						if ( dirPath[ i ] ) y = nodePath[ i ];
+						else {
+							x = nodePath[ i ];
+							y = x.left;
+							x.left = y.right;
+							y.right = x;
+							nodePath[ i - 1 ].right = y;
+	 
+							if ( y.succ() ) {
+								y.succ( false );
+								x.pred( y );
+							}
+	 
+						}
+
+						x = nodePath[ i - 1 ];
+						x.black( false );
+						y.black( true );
+
+						x.right = y.left;
+						y.left = x;
+						if ( i < 2 ) tree = y;
+						else {
+							if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = y;
+							else nodePath[ i - 2 ].left = y;
+						}
+
+						if ( y.pred() ){
+							y.pred( false );
+							x.succ( y );
+						}
+
+						break;
+					}
+				}
+			}
+		}
+		tree.black( true );
+		// We clean up the node path, or we could have stale references later.
+		while( maxDepth-- != 0 ) nodePath[ maxDepth ] = null;
+		if ( ASSERTS ) {
+			checkNodePath();
+			checkTree( tree, 0, -1 );
+		}
+		return true;
+	}
+
+
+	@SuppressWarnings("unchecked")
+	public boolean remove( final KEY_TYPE k ) {
+		if ( tree == null ) return false;
+
+		Entry KEY_GENERIC p = tree;
+		int cmp;
+		int i = 0;
+		final KEY_GENERIC_TYPE kk = KEY_GENERIC_CAST k;
+
+		while( true ) {
+			if ( ( cmp = compare( kk, p.key ) ) == 0 ) break;
+
+			dirPath[ i ] = cmp > 0;
+			nodePath[ i ] = p;
+
+			if ( dirPath[ i++ ] ) {
+				if ( ( p = p.right() ) == null ) {
+					// We clean up the node path, or we could have stale references later.
+					while( i-- != 0 ) nodePath[ i ] = null;
+					return false;
+				}
+			}
+			else {
+				if ( ( p = p.left() ) == null ) {
+					// We clean up the node path, or we could have stale references later.
+					while( i-- != 0 ) nodePath[ i ] = null;
+					return false;
+				}
+			}
+		}
+
+		if ( p.left == null ) firstEntry = p.next();
+		if ( p.right == null ) lastEntry = p.prev();
+
+		if ( p.succ() ) {
+			if ( p.pred() ) {
+				if ( i == 0 ) tree = p.left;
+				else {
+					if ( dirPath[ i - 1 ] ) nodePath[ i - 1 ].succ( p.right );
+					else nodePath[ i - 1 ].pred( p.left );
+				}
+			}
+			else {
+				p.prev().right = p.right;
+
+				if ( i == 0 ) tree = p.left;
+				else {
+					if ( dirPath[ i - 1 ] ) nodePath[ i - 1 ].right = p.left;
+					else nodePath[ i - 1 ].left = p.left;
+				}
+			}
+		}
+		else {
+			boolean color;
+			Entry KEY_GENERIC r = p.right;
+
+			if ( r.pred() ) {
+				r.left = p.left;
+				r.pred( p.pred() );
+				if ( ! r.pred() ) r.prev().right = r;
+				if ( i == 0 ) tree = r;
+				else {
+					if ( dirPath[ i - 1 ] ) nodePath[ i - 1 ].right = r;
+					else nodePath[ i - 1 ].left = r;
+				}
+
+				color = r.black();
+				r.black( p.black() );
+				p.black( color );
+				dirPath[ i ] = true;
+				nodePath[ i++ ] = r;
+			}
+			else {
+				Entry KEY_GENERIC s;
+				int j = i++;
+
+				while( true ) {
+					dirPath[ i ] = false;
+					nodePath[ i++ ] = r;
+					s = r.left;
+					if ( s.pred() ) break;
+					r = s;
+				}
+
+				dirPath[ j ] = true;
+				nodePath[ j ] = s;
+
+				if ( s.succ() ) r.pred( s );
+				else r.left = s.right;
+
+				s.left = p.left;
+
+				if ( ! p.pred() ) {
+					p.prev().right =  s;
+					s.pred( false );
+				}
+
+				s.right( p.right );
+							
+				color = s.black();
+				s.black( p.black() );
+				p.black( color );
+	
+				if ( j == 0 ) tree = s;
+				else {
+					if ( dirPath[ j - 1 ] ) nodePath[ j - 1 ].right = s;
+					else nodePath[ j - 1 ].left = s;
+				}
+			}
+		}
+
+		int maxDepth = i;
+
+		if ( p.black() ) {
+			for( ; i > 0; i-- ) {
+				if ( dirPath[ i - 1 ] && ! nodePath[ i - 1 ].succ() ||
+					 ! dirPath[ i - 1 ] && ! nodePath[ i - 1 ].pred() ) {
+					Entry KEY_GENERIC x = dirPath[ i - 1 ] ? nodePath[ i - 1 ].right : nodePath[ i - 1 ].left;
+
+					if ( ! x.black() ) {
+						x.black( true );
+						break;
+					}
+				}
+
+				if ( ! dirPath[ i - 1 ] ) {
+					Entry KEY_GENERIC w = nodePath[ i - 1 ].right;
+
+					if ( ! w.black() ) {
+						w.black( true );
+						nodePath[ i - 1 ].black( false );
+
+						nodePath[ i - 1 ].right = w.left;
+						w.left = nodePath[ i - 1 ];
+
+						if ( i < 2 ) tree = w;
+						else {
+							if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = w;
+							else nodePath[ i - 2 ].left = w;
+						}
+
+						nodePath[ i ] = nodePath[ i - 1 ];
+						dirPath[ i ] = false;
+						nodePath[ i - 1 ] = w;
+						if ( maxDepth == i++ ) maxDepth++;
+
+						w = nodePath[ i - 1 ].right;
+					}
+
+					if ( ( w.pred() || w.left.black() ) &&
+						 ( w.succ() || w.right.black() ) ) {
+						w.black( false );
+					}
+					else {
+						if ( w.succ() || w.right.black() ) {
+							Entry KEY_GENERIC y = w.left;
+									 
+							y.black ( true );
+							w.black( false );
+							w.left = y.right;
+							y.right = w;
+							w = nodePath[ i - 1 ].right = y;
+
+							if ( w.succ() ) {
+								w.succ( false );
+								w.right.pred( w );
+							}
+						}
+
+						w.black( nodePath[ i - 1 ].black() );
+						nodePath[ i - 1 ].black( true );
+						w.right.black( true );
+
+						nodePath[ i - 1 ].right = w.left;
+						w.left = nodePath[ i - 1 ];
+
+						if ( i < 2 ) tree = w;
+						else {
+							if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = w;
+							else nodePath[ i - 2 ].left = w;
+						}
+
+						if ( w.pred() ) {
+							w.pred( false );
+							nodePath[ i - 1 ].succ( w );
+						}
+						break;
+					}
+				}
+				else {
+					Entry KEY_GENERIC w = nodePath[ i - 1 ].left;
+
+					if ( ! w.black() ) {
+						w.black ( true );
+						nodePath[ i - 1 ].black( false );
+
+						nodePath[ i - 1 ].left = w.right;
+						w.right = nodePath[ i - 1 ];
+
+						if ( i < 2 ) tree = w;
+						else {
+							if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = w;
+							else nodePath[ i - 2 ].left = w;
+						}
+
+						nodePath[ i ] = nodePath[ i - 1 ];
+						dirPath[ i ] = true;
+						nodePath[ i - 1 ] = w;
+						if ( maxDepth == i++ ) maxDepth++;
+
+						w = nodePath[ i - 1 ].left;
+					}
+						  
+					if ( ( w.pred() || w.left.black() ) &&
+						 ( w.succ() || w.right.black() ) ) {
+						w.black( false );
+					}
+					else {
+						if ( w.pred() || w.left.black() ) {
+							Entry KEY_GENERIC y = w.right;
+
+							y.black( true );
+							w.black ( false );
+							w.right = y.left;
+							y.left = w;
+							w = nodePath[ i - 1 ].left = y;
+
+							if ( w.pred() ) {
+								w.pred( false );
+								w.left.succ( w );
+							}
+						}
+
+						w.black( nodePath[ i - 1 ].black() );
+						nodePath[ i - 1 ].black( true );
+						w.left.black( true );
+
+						nodePath[ i - 1 ].left = w.right;
+						w.right = nodePath[ i - 1 ];
+									  
+						if ( i < 2 ) tree = w;
+						else {
+							if ( dirPath[ i - 2 ] ) nodePath[ i - 2 ].right = w;
+							else nodePath[ i - 2 ].left = w;
+						}
+
+						if ( w.succ() ) {
+							w.succ( false );
+							nodePath[ i - 1 ].pred( w );
+						}
+						break;
+					}
+				}
+			}
+
+			if ( tree != null ) tree.black( true );
+		}
+
+		count--;
+		// We clean up the node path, or we could have stale references later.
+		while( maxDepth-- != 0 ) nodePath[ maxDepth ] = null;
+		if ( ASSERTS ) {
+			checkNodePath();
+			checkTree( tree, 0, -1 );
+		}
+		return true;
+	}
+
+	@SuppressWarnings("unchecked")
+	public boolean contains( final KEY_TYPE k ) {
+		return findKey( KEY_GENERIC_CAST k ) != null;
+	}
+
+#if #keyclass(Object)
+	public K get( final KEY_TYPE k ) {
+		final Entry KEY_GENERIC entry = findKey( KEY_GENERIC_CAST k );
+		return entry == null ? null : entry.getKey();
+	}
+#endif
+
+	public void clear() {
+		count = 0;
+		tree = null;
+		firstEntry = lastEntry = null;
+	}
+
+	 
+	/** This class represents an entry in a tree set.
+	 *
+	 * <P>The only piece of "metadata", i.e., {@link Entry#info}, is used to store
+	 * information about color, predecessor status and successor status.
+	 *
+	 * <P>Note that since the class is recursive, an entry can
+	 * equivalently be considered a tree.
+	 */
+
+	private static final class Entry KEY_GENERIC implements Cloneable {
+		/** If the bit in this mask is true, the node is black. */
+		private final static int BLACK_MASK = 1;
+		/** If the bit in this mask is true, {@link #right} points to a successor. */
+		private final static int SUCC_MASK = 1 << 31;
+		/** If the bit in this mask is true, {@link #left} points to a predecessor. */
+		private final static int PRED_MASK = 1 << 30;
+		/** The key of this entry. */
+		KEY_GENERIC_TYPE key;
+		/** The pointers to the left and right subtrees. */
+		Entry KEY_GENERIC left, right;
+		/** This integer holds different information in different bits (see {@link #SUCC_MASK}, {@link #PRED_MASK} and {@link #BLACK_MASK}). */
+		int info;
+
+		Entry() {}
+
+		/** Creates a new red entry with the given key.
+		 *
+		 * @param k a key.
+		 */
+		Entry( final KEY_GENERIC_TYPE k ) {
+			this.key = k;
+			info = SUCC_MASK | PRED_MASK;
+		}
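+
+		/* Worked example of the bit packing above (illustrative only): a freshly created
+		   entry has info = SUCC_MASK | PRED_MASK = 0xC0000000, i.e., both pointers are
+		   threads and the BLACK_MASK bit is clear, so the node is red, as required for a
+		   newly inserted red-black tree node; after black( true ) the value becomes
+		   0xC0000001.
+		*/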
+		  
+		/** Returns the left subtree. 
+		 *
+		 * @return the left subtree (<code>null</code> if the left
+		 * subtree is empty).
+		 */
+		Entry KEY_GENERIC left() {
+			return ( info & PRED_MASK ) != 0 ? null : left;
+		}
+		  
+		/** Returns the right subtree. 
+		 *
+		 * @return the right subtree (<code>null</code> if the right
+		 * subtree is empty).
+		 */
+		Entry KEY_GENERIC right() {
+			return ( info & SUCC_MASK ) != 0 ? null : right;
+		}
+		  
+		/** Checks whether the left pointer is really a predecessor.
+		 * @return true if the left pointer is a predecessor.
+		 */
+		boolean pred() {
+			return ( info & PRED_MASK ) != 0;
+		}
+		  
+		/** Checks whether the right pointer is really a successor.
+		 * @return true if the right pointer is a successor.
+		 */
+		boolean succ() {
+			return ( info & SUCC_MASK ) != 0;
+		}
+		  
+		/** Sets whether the left pointer is really a predecessor.
+		 * @param pred if true then the left pointer will be considered a predecessor.
+		 */
+		void pred( final boolean pred ) {
+			if ( pred ) info |= PRED_MASK;
+			else info &= ~PRED_MASK;
+		}
+		  
+		/** Sets whether the right pointer is really a successor.
+		 * @param succ if true then the right pointer will be considered a successor.
+		 */
+		void succ( final boolean succ ) {
+			if ( succ ) info |= SUCC_MASK;
+			else info &= ~SUCC_MASK;
+		}
+		  
+		/** Sets the left pointer to a predecessor.
+		 * @param pred the predecessor.
+		 */
+		void pred( final Entry KEY_GENERIC pred ) {
+			info |= PRED_MASK;
+			left = pred;
+		}
+		  
+		/** Sets the right pointer to a successor.
+		 * @param succ the successor.
+		 */
+		void succ( final Entry KEY_GENERIC succ ) {
+			info |= SUCC_MASK;
+			right = succ;
+		}
+		  
+		/** Sets the left pointer to the given subtree.
+		 * @param left the new left subtree.
+		 */
+		void left( final Entry KEY_GENERIC left ) {
+			info &= ~PRED_MASK;
+			this.left = left;
+		}
+		  
+		/** Sets the right pointer to the given subtree.
+		 * @param right the new right subtree.
+		 */
+		void right( final Entry KEY_GENERIC right ) {
+			info &= ~SUCC_MASK;
+			this.right = right;
+		}
+		  
+		/** Returns whether this node is black.
+		 * @return true iff this node is black.
+		 */
+		boolean black() {
+			return ( info & BLACK_MASK ) != 0;
+		}
+
+		/** Sets whether this node is black.
+		 * @param black if true, then this node becomes black; otherwise, it becomes red.
+		 */
+		void black( final boolean black ) {
+			if ( black ) info |= BLACK_MASK;
+			else info &= ~BLACK_MASK;
+		}
+
+		/** Computes the next entry in the set order.
+		 *
+		 * @return the next entry (<code>null</code> if this is the last entry).
+		 */
+
+		Entry KEY_GENERIC next() {
+			Entry KEY_GENERIC next = this.right;
+			if ( ( info & SUCC_MASK ) == 0 ) while ( ( next.info & PRED_MASK ) == 0 ) next = next.left;
+			return next;
+		}
+
+		/** Computes the previous entry in the set order.
+		 *
+		 * @return the previous entry (<code>null</code> if this is the first entry).
+		 */
+
+		Entry KEY_GENERIC prev() {
+			Entry KEY_GENERIC prev = this.left;
+			if ( ( info & PRED_MASK ) == 0 ) while ( ( prev.info & SUCC_MASK ) == 0 ) prev = prev.right;
+			return prev;
+		}
+
+		@SuppressWarnings("unchecked")
+		public Entry KEY_GENERIC clone() {
+			Entry KEY_GENERIC c;
+			try {
+				c = (Entry KEY_GENERIC)super.clone();
+			}
+			catch(CloneNotSupportedException cantHappen) {
+				throw new InternalError();
+			}
+
+			c.key = key;
+			c.info = info;
+
+			return c;
+		}
+
+		public boolean equals( final Object o ) {
+			if (!(o instanceof Entry)) return false;
+			Entry KEY_GENERIC_WILDCARD e = (Entry KEY_GENERIC_WILDCARD)o;
+				
+			return KEY_EQUALS(key, e.key);
+		}
+		  
+		public int hashCode() {
+			return KEY2JAVAHASH(key);
+		}
+		  
+		  
+		public String toString() {
+			return String.valueOf( key );
+		}
+		  
+		/*
+		  public void prettyPrint() {
+		  prettyPrint(0);
+		  }
+
+
+		  public void prettyPrint(int level) {
+		  if ( pred() ) {
+		  for (int i = 0; i < level; i++)
+		  System.err.print("  ");
+		  System.err.println("pred: " + left );
+		  }
+		  else if (left != null)
+		  left.prettyPrint(level +1 );
+		  for (int i = 0; i < level; i++)
+		  System.err.print("  ");
+		  System.err.println(key + " (" + (black() ? "black" : "red") + ")");
+		  if ( succ() ) {
+		  for (int i = 0; i < level; i++)
+		  System.err.print("  ");
+		  System.err.println("succ: " + right );
+		  }
+		  else if (right != null)
+		  right.prettyPrint(level + 1);
+		  }*/
+	}
+	 
+	/*
+	  public void prettyPrint() {
+	  System.err.println("size: " + count);
+	  if (tree != null) tree.prettyPrint();
+	  }
+	*/
+	
+	public int size() {
+		return count;
+	}
+	 
+	public boolean isEmpty() {
+		return count == 0;
+	}
+	 
+	public KEY_GENERIC_TYPE FIRST() {
+		if ( tree == null ) throw new NoSuchElementException();
+		return firstEntry.key;
+	}
+	 
+	public KEY_GENERIC_TYPE LAST() {
+		if ( tree == null ) throw new NoSuchElementException();
+		return lastEntry.key;
+	}
+	 
+
+	/** An iterator on the whole range.
+	 *
+	 * <P>This class can iterate in both directions on a threaded tree.
+	 */
+
+	private class SetIterator extends KEY_ABSTRACT_LIST_ITERATOR KEY_GENERIC {
+		/** The entry that will be returned by the next call to {@link java.util.ListIterator#previous()} (or <code>null</code> if no previous entry exists). */
+		Entry KEY_GENERIC prev;
+		/** The entry that will be returned by the next call to {@link java.util.ListIterator#next()} (or <code>null</code> if no next entry exists). */
+		Entry KEY_GENERIC next;
+		/** The last entry that was returned (or <code>null</code> if we did not iterate or used {@link #remove()}). */
+		Entry KEY_GENERIC curr;
+		/** The current index (in the sense of a {@link java.util.ListIterator}). Note that this value is not meaningful when this iterator has been created using the nonempty constructor.*/
+		int index = 0;
+		  
+		SetIterator() {
+			next = firstEntry;
+		}
+
+		SetIterator( final KEY_GENERIC_TYPE k ) {
+			if ( ( next = locateKey( k ) ) != null ) {
+				if ( compare( next.key, k ) <= 0 ) {
+					prev = next;
+					next = next.next();
+				}
+				else prev = next.prev();
+			}
+		}
+
+		public boolean hasNext() { return next != null; }
+		public boolean hasPrevious() { return prev != null; }
+
+		void updateNext() {
+			next = next.next();
+		}
+
+		Entry KEY_GENERIC nextEntry() {
+			if ( ! hasNext() ) throw new NoSuchElementException();
+			curr = prev = next;
+			index++;
+			updateNext();
+			return curr;
+		}
+
+		public KEY_GENERIC_TYPE NEXT_KEY() { return nextEntry().key; }
+		public KEY_GENERIC_TYPE PREV_KEY() { return previousEntry().key; }
+
+		void updatePrevious() {
+			prev = prev.prev();
+		}
+
+		Entry KEY_GENERIC previousEntry() {
+			if ( ! hasPrevious() ) throw new NoSuchElementException();
+			curr = next = prev;
+			index--;
+			updatePrevious();
+			return curr;
+		}
+
+		public int nextIndex() {
+			return index;
+		}
+
+		public int previousIndex() {
+			return index - 1;
+		}
+
+		public void remove() {
+			if ( curr == null ) throw new IllegalStateException();
+			/* If the last operation was a next(), we are removing an entry that precedes
+			   the current index, and thus we must decrement it. */
+			if ( curr == prev ) index--;
+			next = prev = curr;
+			updatePrevious();
+			updateNext();
+			RB_TREE_SET.this.remove( curr.key );
+			curr = null;
+		}
+	}
+
+
+	public KEY_BIDI_ITERATOR KEY_GENERIC iterator() {
+		return new SetIterator();
+	}
+
+	public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) {
+		return new SetIterator( from );
+	}
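+
+	/* Illustrative sketch (assuming the int instantiation IntRBTreeSet): the iterator can be
+	   started at an arbitrary element and moved in both directions, and, as noted in the
+	   class comment, the plain iterator() can be cast to a type-specific list iterator
+	   (here IntBidirectionalIterator and IntListIterator from it.unimi.dsi.fastutil.ints).
+
+	   IntRBTreeSet s = new IntRBTreeSet( new int[] { 10, 20, 30 } );
+	   IntBidirectionalIterator i = s.iterator( 20 );
+	   i.nextInt();     // 30
+	   i.previousInt(); // 30 again; further calls return 20 and then 10
+	   IntListIterator li = (IntListIterator)s.iterator();
+	   li.nextIndex();  // 0
+	*/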
+
+	public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() {
+		return actualComparator;
+	}
+
+	public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_TYPE to ) {
+		return new Subset( KEY_NULL, true, to, false );
+	}
+
+	public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_TYPE from ) {
+		return new Subset( from, false, KEY_NULL, true );
+	}
+
+	public SORTED_SET KEY_GENERIC subSet( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) {
+		return new Subset( from, false, to, false );
+	}
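+
+	/* Illustrative sketch (assuming the int instantiation IntRBTreeSet): the sets returned by
+	   headSet(), tailSet() and subSet() are live views, so changes made through a view are
+	   visible in the backing set and vice versa.
+
+	   IntRBTreeSet s = new IntRBTreeSet( new int[] { 1, 3, 5, 7 } );
+	   IntSortedSet v = s.subSet( 3, 7 ); // { 3, 5 }
+	   v.remove( 5 );                     // s is now { 1, 3, 7 }
+	   s.add( 4 );                        // v is now { 3, 4 }
+	*/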
+
+
+
+	/** A subset with given range.
+	 *
+	 * <P>This class represents a subset. One has to specify the left/right
+	 * limits (which can be set to -∞ or ∞). Since the subset is a
+	 * view on the set, at a given moment the limits of
+	 * the range may no longer be in the main set. Thus, things such as
+	 * {@link java.util.SortedSet#first()} or {@link java.util.Collection#size()} must always be computed
+	 * on the fly.
+	 */
+	private final class Subset extends ABSTRACT_SORTED_SET KEY_GENERIC implements java.io.Serializable, SORTED_SET KEY_GENERIC {
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		/** The start of the subset range, unless {@link #bottom} is true. */
+		KEY_GENERIC_TYPE from;
+		/** The end of the subset range, unless {@link #top} is true. */
+		KEY_GENERIC_TYPE to;
+		/** If true, the subset range starts from -∞. */
+		boolean bottom;
+		/** If true, the subset range goes to ∞. */
+		boolean top;
+		  
+		/** Creates a new subset with given key range.
+		 *
+		 * @param from the start of the subset range.
+		 * @param bottom if true, the first parameter is ignored and the range starts from -∞.
+		 * @param to the end of the subset range.
+		 * @param top if true, the third parameter is ignored and the range goes to ∞.
+		 */
+		public Subset( final KEY_GENERIC_TYPE from, final boolean bottom, final KEY_GENERIC_TYPE to, final boolean top ) {
+			if ( ! bottom && ! top && RB_TREE_SET.this.compare( from, to ) > 0 ) throw new IllegalArgumentException( "Start element (" + from  + ") is larger than end element (" + to + ")" );
+
+			this.from = from;
+			this.bottom = bottom;
+			this.to = to;
+			this.top = top;
+		}
+
+		public void clear() {
+			final SubsetIterator i = new SubsetIterator();
+			while( i.hasNext() ) {
+				i.next();
+				i.remove();
+			}
+		}
+
+		/** Checks whether a key is in the subset range.
+		 * @param k a key.
+		 * @return true if the key is in the subset range.
+		 */
+		final boolean in( final KEY_GENERIC_TYPE k ) {
+			return ( bottom || RB_TREE_SET.this.compare( k, from ) >= 0 ) &&
+				( top || RB_TREE_SET.this.compare( k, to ) < 0 );
+		}
+
+		@SuppressWarnings("unchecked")
+		public boolean contains( final KEY_TYPE k ) {
+			return in( KEY_GENERIC_CAST k ) && RB_TREE_SET.this.contains( k );
+		}
+
+		public boolean add( final KEY_GENERIC_TYPE  k ) {
+			if ( ! in( k ) ) throw new IllegalArgumentException( "Element (" + k + ") out of range [" + ( bottom ? "-" : String.valueOf( from ) ) + ", " + ( top ? "-" : String.valueOf( to ) ) + ")" ); 
+			return RB_TREE_SET.this.add( k );
+		}
+
+		@SuppressWarnings("unchecked")
+		public boolean remove( final KEY_TYPE k ) {
+			if ( ! in( KEY_GENERIC_CAST k ) ) return false;
+			return RB_TREE_SET.this.remove( k );
+		}
+
+		public int size() {
+			final SubsetIterator i = new SubsetIterator();
+			int n = 0;
+				
+			while( i.hasNext() ) {
+				n++;
+				i.next();
+			}
+				
+			return n;
+		}
+
+
+		public boolean isEmpty() {
+			return ! new SubsetIterator().hasNext();
+		}
+		  
+		public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() {
+			return actualComparator;
+		}
+
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator() {
+			return new SubsetIterator();
+		}
+		  
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) {
+			return new SubsetIterator( from );
+		}
+		  
+		public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_TYPE to ) {
+			if ( top ) return new Subset( from, bottom, to, false );
+			return compare( to, this.to ) < 0 ? new Subset( from, bottom, to, false ) : this;
+		}
+		  
+		public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_TYPE from ) {
+			if ( bottom ) return new Subset( from, false, to, top );
+			return compare( from, this.from ) > 0 ? new Subset( from, false, to, top ) : this;
+		}
+		  
+		public SORTED_SET KEY_GENERIC subSet( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) {
+			if ( top && bottom ) return new Subset( from, false, to, false );
+			if ( ! top ) to = compare( to, this.to ) < 0 ? to : this.to;
+			if ( ! bottom ) from = compare( from, this.from ) > 0 ? from : this.from;
+			if ( ! top && ! bottom && from == this.from && to == this.to ) return this;
+			return new Subset( from, false, to, false );
+		}
+
+
+		/** Locates the first entry.
+		 *
+		 * @return the first entry of this subset, or <code>null</code> if the subset is empty.
+		 */
+		public RB_TREE_SET.Entry KEY_GENERIC firstEntry() {
+			if ( tree == null ) return null;
+			// If this subset goes to -infinity, we return the main set's first entry; otherwise, we locate the start of the range.
+			RB_TREE_SET.Entry KEY_GENERIC e;
+			if ( bottom ) e = firstEntry;
+			else {
+				e = locateKey( from );
+				// If we find either the start or something greater we're OK.
+				if ( compare( e.key, from ) < 0 ) e = e.next();
+			}
+			// Finally, if this subset doesn't go to infinity, we check that the resulting key isn't greater than the end.
+			if ( e == null || ! top && compare( e.key, to ) >= 0 ) return null;
+			return e;
+		}
+	 
+		/** Locates the last entry.
+		 *
+		 * @return the last entry of this subset, or <code>null</code> if the subset is empty.
+		 */
+		public RB_TREE_SET.Entry KEY_GENERIC lastEntry() {
+			if ( tree == null ) return null;
+			// If this subset goes to infinity, we return the main set's last entry; otherwise, we locate the end of the range.
+			RB_TREE_SET.Entry KEY_GENERIC e;
+			if ( top ) e = lastEntry;
+			else {
+				e = locateKey( to );
+				// If we find something smaller than the end we're OK.
+				if ( compare( e.key, to ) >= 0 ) e = e.prev();
+			}
+			// Finally, if this subset doesn't go to -infinity, we check that the resulting key isn't smaller than the start.
+			if ( e == null || ! bottom && compare( e.key, from ) < 0 ) return null;
+			return e;
+		}
+
+
+		public KEY_GENERIC_TYPE FIRST() {
+			RB_TREE_SET.Entry KEY_GENERIC e = firstEntry();
+			if ( e == null ) throw new NoSuchElementException();
+			return e.key;
+		}
+
+		public KEY_GENERIC_TYPE LAST() {
+			RB_TREE_SET.Entry KEY_GENERIC e = lastEntry();
+			if ( e == null ) throw new NoSuchElementException();
+			return e.key;
+		}
+	 
+		/** An iterator for subranges.
+		 * 
+		 * <P>This class inherits from {@link SetIterator}, but overrides the methods that
+		 * update the pointer after a {@link java.util.ListIterator#next()} or {@link java.util.ListIterator#previous()}. If the move
+		 * would take us out of the range of the subset, we just overwrite the next or previous
+		 * entry with <code>null</code>.
+		 */
+		private final class SubsetIterator extends SetIterator {
+			SubsetIterator() {
+				next = firstEntry();
+			}
+
+			SubsetIterator( final KEY_GENERIC_TYPE k ) {
+				this();
+
+				if ( next != null ) {
+					if ( ! bottom && compare( k, next.key ) < 0 ) prev = null;
+					else if ( ! top && compare( k, ( prev = lastEntry() ).key ) >= 0 ) next = null;
+					else {
+						next = locateKey( k );
+								
+						if ( compare( next.key, k ) <= 0 ) {
+							prev = next;
+							next = next.next();
+						}
+						else prev = next.prev();
+					}
+				}
+			}
+
+			void updatePrevious() {
+				prev = prev.prev();
+				if ( ! bottom && prev != null && RB_TREE_SET.this.compare( prev.key, from ) < 0 ) prev = null;
+			}
+				
+			void updateNext() {
+				next = next.next();
+				if ( ! top && next != null && RB_TREE_SET.this.compare( next.key, to ) >= 0 ) next = null;
+			}
+		}
+	}
+
+
+
+	/** Returns a deep copy of this tree set.
+	 *
+	 * <P>This method performs a deep copy of this tree set; the data stored in the
+	 * set, however, is not cloned. Note that this makes a difference only for object keys.
+	 *
+	 * @return a deep copy of this tree set.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public Object clone() {
+		RB_TREE_SET KEY_GENERIC c;
+		try {
+			c = (RB_TREE_SET KEY_GENERIC)super.clone();
+		}
+		catch(CloneNotSupportedException cantHappen) {
+			throw new InternalError();
+		}
+
+		c.allocatePaths();
+
+		if ( count != 0 ) {
+			// This apparently unfathomable code, too, is derived from GNU libavl.
+			Entry KEY_GENERIC e, p, q, rp = new Entry KEY_GENERIC(), rq = new Entry KEY_GENERIC();
+
+			p = rp;
+			rp.left( tree );
+
+			q = rq;
+			rq.pred( null );
+
+			while( true ) {
+				if ( ! p.pred() ) {
+					e = p.left.clone();
+					e.pred( q.left );
+					e.succ( q );
+					q.left( e );
+
+					p = p.left;
+					q = q.left;
+				}
+				else {
+					while( p.succ() ) {
+						p = p.right;
+
+						if ( p == null ) {
+							q.right = null;
+							c.tree = rq.left;
+
+							c.firstEntry = c.tree;
+							while( c.firstEntry.left != null ) c.firstEntry = c.firstEntry.left;
+							c.lastEntry = c.tree;
+							while( c.lastEntry.right != null ) c.lastEntry = c.lastEntry.right;
+
+							return c;
+						}
+						q = q.right;
+					}
+								
+					p = p.right;
+					q = q.right;
+				}
+
+				if ( ! p.succ() ) {
+					e = p.right.clone();
+					e.succ( q.right );
+					e.pred( q );
+					q.right( e );
+				}
+			}
+		}
+
+		return c;
+	}
+	 
+
+	private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
+		int n = count;
+		SetIterator i = new SetIterator();
+
+		s.defaultWriteObject(); 
+		while( n-- != 0 ) s.WRITE_KEY( i.NEXT_KEY() );
+	}
+
+
+	/** Reads the given number of entries from the input stream, returning the corresponding tree. 
+	 *
+	 * @param s the input stream.
+	 * @param n the (positive) number of entries to read.
+	 * @param pred the entry containing the key that precedes the first key in the tree.
+	 * @param succ the entry containing the key that follows the last key in the tree.
+	 */
+	@SuppressWarnings("unchecked")
+	private Entry KEY_GENERIC readTree( final java.io.ObjectInputStream s, final int n, final Entry KEY_GENERIC pred, final Entry KEY_GENERIC succ ) throws java.io.IOException, ClassNotFoundException {
+		if ( n == 1 ) {
+			final Entry KEY_GENERIC top = new Entry KEY_GENERIC( KEY_GENERIC_CAST s.READ_KEY() );
+			top.pred( pred );
+			top.succ( succ );
+			top.black( true );
+
+			return top;
+		}
+
+		if ( n == 2 ) {
+			/* We handle this case separately so that recursion will
+			 *always* be on nonempty subtrees. */
+			final Entry KEY_GENERIC top = new Entry KEY_GENERIC( KEY_GENERIC_CAST s.READ_KEY() );
+			top.black( true );
+			top.right( new Entry KEY_GENERIC( KEY_GENERIC_CAST s.READ_KEY() ) );
+			top.right.pred( top );
+			top.pred( pred );
+			top.right.succ( succ );
+			
+			return top;
+		}
+
+		// The right subtree is the largest one.
+		final int rightN = n / 2, leftN = n - rightN - 1;
+
+		final Entry KEY_GENERIC top = new Entry KEY_GENERIC();
+
+		top.left( readTree( s, leftN, pred, top ) );
+		
+		top.key = KEY_GENERIC_CAST s.READ_KEY();
+		top.black( true );
+
+		top.right( readTree( s, rightN, top, succ ) );
+
+		if ( n + 2 == ( ( n + 2 )  & -( n + 2 ) ) ) top.right.black( false ); // Quick test for determining whether n + 2 is a power of 2.
+
+		return top;
+	}
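+
+	/* Note on the test above (a worked example): x & -x isolates the lowest set bit of x, so
+	   n + 2 == ( ( n + 2 ) & -( n + 2 ) ) holds exactly when n + 2 has a single bit set, i.e.,
+	   when it is a power of two. For n = 6 we get 8 & -8 == 8, so the right subtree root is
+	   colored red; for n = 5 we get 7 & -7 == 1 != 7, so it stays black.
+	*/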
+
+
+	private void readObject( java.io.ObjectInputStream s ) throws java.io.IOException, ClassNotFoundException {
+		s.defaultReadObject();
+		/* The storedComparator is now correctly set, but we must restore
+		   the actualComparator on the fly. */
+		setActualComparator();
+		allocatePaths();
+
+		if ( count != 0 ) {
+			tree = readTree( s, count, null, null );
+			Entry KEY_GENERIC e;
+
+			e = tree;
+			while( e.left() != null ) e = e.left();
+			firstEntry = e;
+
+			e = tree;
+			while( e.right() != null ) e = e.right();
+			lastEntry = e;
+		}
+
+		if ( ASSERTS ) checkTree( tree, 0, -1 );
+	}
+
+
+#ifdef ASSERTS_CODE
+	private void checkNodePath() {
+		for( int i = nodePath.length; i-- != 0; ) assert nodePath[ i ] == null : i;
+	}
+
+	private static KEY_GENERIC int checkTree( Entry KEY_GENERIC e, int d, int D ) {
+		if ( e == null ) return 0;
+		if ( e.black() ) d++;
+		if ( e.left() != null ) D = checkTree( e.left(), d, D );
+		if ( e.right() != null ) D = checkTree( e.right(), d, D );
+		if ( e.left() == null && e.right() == null ) {
+			if ( D == -1 ) D = d;
+			else if ( D != d ) throw new AssertionError( "Mismatch between number of black nodes (" + D + " and " + d + ")" );
+		}
+		return D;
+	}
+#else
+	private void checkNodePath() {}
+	@SuppressWarnings("unused")
+	private int checkTree( Entry KEY_GENERIC e, int d, int D ) { return 0; }
+#endif
+
+#ifdef TEST
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#else
+		return Integer.toBinaryString( r.nextInt() );
+#endif
+	}
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition p = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, p ).toString();
+	}
+
+	private static void speedTest( int n, boolean comp ) {
+		int i, j;
+		RB_TREE_SET m;
+		java.util.TreeSet t;
+		KEY_TYPE k[] = new KEY_TYPE[n];
+		KEY_TYPE nk[] = new KEY_TYPE[n];
+		long ms;
+
+		for( i = 0; i < n; i++ ) {
+			k[i] = genKey();
+			nk[i] = genKey();
+		}
+		  
+		double totAdd = 0, totYes = 0, totNo = 0, totIterFor = 0, totIterBack = 0, totRemYes = 0, d, dd;
+
+		if ( comp ) {
+			for( j = 0; j < 20; j++ ) {
+
+				t = new java.util.TreeSet();
+
+				/* We first add all pairs to t. */
+				for( i = 0; i < n;  i++ ) t.add( KEY2OBJ( k[i] ) );
+
+				/* Then we remove the first half and put it back. */
+				for( i = 0; i < n/2;  i++ ) t.remove( KEY2OBJ( k[i] ) );
+
+				ms = System.currentTimeMillis();
+				for( i = 0; i < n/2;  i++ ) t.add( KEY2OBJ( k[i] ) );
+				d = System.currentTimeMillis() - ms;
+
+				/* Then we remove the other half and put it back again. */
+				ms = System.currentTimeMillis();
+				for( i = n/2; i < n;  i++ ) t.remove( KEY2OBJ( k[i] ) );
+				dd = System.currentTimeMillis() - ms ;
+
+				ms = System.currentTimeMillis();
+				for( i = n/2; i < n;  i++ ) t.add( KEY2OBJ( k[i] ) );
+				d += System.currentTimeMillis() - ms;
+				if ( j > 2 ) totAdd += n/d; 				
+				System.out.print("Add: " + format( n/d ) +" K/s " );
+
+				/* Then we remove again the first half. */
+				ms = System.currentTimeMillis();
+				for( i = 0; i < n/2;  i++ ) t.remove( KEY2OBJ( k[i] ) );
+				dd += System.currentTimeMillis() - ms ;
+				if ( j > 2 ) totRemYes += n/dd; 				
+				System.out.print("RemYes: " + format( n/dd ) +" K/s " );
+
+				/* And then we put it back. */
+				for( i = 0; i < n/2;  i++ ) t.add( KEY2OBJ( k[i] ) );
+
+				/* We check for pairs in t. */
+				ms = System.currentTimeMillis();
+				for( i = 0; i < n;  i++ ) t.contains( KEY2OBJ( k[i] ) );
+				d = 1.0 * n / (System.currentTimeMillis() - ms );
+				if ( j > 2 ) totYes += d; 				
+				System.out.print("Yes: " + format( d ) +" K/s " );
+
+				/* We check for pairs not in t. */
+				ms = System.currentTimeMillis();
+				for( i = 0; i < n;  i++ ) t.contains( KEY2OBJ( nk[i] ) );
+				d = 1.0 * n / (System.currentTimeMillis() - ms );
+				if ( j > 2 ) totNo += d; 				
+				System.out.print("No: " + format( d ) +" K/s " );
+
+				/* We iterate on t. */
+				ms = System.currentTimeMillis();
+				for( Iterator it = t.iterator(); it.hasNext(); it.next() );
+				d = 1.0 * n / (System.currentTimeMillis() - ms );
+				if ( j > 2 ) totIterFor += d; 				
+				System.out.print("IterFor: " + format( d ) +" K/s " );
+				
+				System.out.println();
+			}
+
+			System.out.println();
+			System.out.println( "java.util Add: " + format( totAdd/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s IterFor: " + format( totIterFor/(j-3) )  + " K/s"  );
+
+			System.out.println();
+
+			totAdd = totYes = totNo = totIterFor = totIterBack = totRemYes = 0;
+
+		}
+
+		for( j = 0; j < 20; j++ ) {
+
+			m = new RB_TREE_SET();
+
+
+			/* We first add all pairs to m. */
+			for( i = 0; i < n;  i++ ) m.add( k[i] );
+
+			/* Then we remove the first half and put it back. */
+			for( i = 0; i < n/2;  i++ ) m.remove( k[i] );
+
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n/2;  i++ ) m.add( k[i] );
+			d = System.currentTimeMillis() - ms;
+
+			/* Then we remove the other half and put it back again. */
+			ms = System.currentTimeMillis();
+			for( i = n/2; i < n;  i++ ) m.remove( k[i] );
+			dd = System.currentTimeMillis() - ms ;
+
+			ms = System.currentTimeMillis();
+			for( i = n/2; i < n;  i++ ) m.add( k[i] );
+			d += System.currentTimeMillis() - ms;
+			if ( j > 2 ) totAdd += n/d; 				
+			System.out.print("Add: " + format( n/d ) +" K/s " );
+
+			/* Then we remove again the first half. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n/2;  i++ ) m.remove( k[i] );
+			dd += System.currentTimeMillis() - ms ;
+			if ( j > 2 ) totRemYes += n/dd; 				
+			System.out.print("RemYes: " + format( n/dd ) +" K/s " );
+
+			/* And then we put it back. */
+			for( i = 0; i < n/2;  i++ ) m.add( k[i] );
+
+			/* We check for pairs in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.contains( k[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totYes += d; 				
+			System.out.print("Yes: " + format( d ) +" K/s " );
+
+			/* We check for pairs not in m. */
+			ms = System.currentTimeMillis();
+			for( i = 0; i < n;  i++ ) m.contains( nk[i] );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totNo += d; 				
+			System.out.print("No: " + format( d ) +" K/s " );
+
+			/* We iterate on m. */
+			KEY_LIST_ITERATOR it = (KEY_LIST_ITERATOR)m.iterator();
+			ms = System.currentTimeMillis();
+			for( ; it.hasNext(); it.NEXT_KEY() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIterFor += d; 				
+			System.out.print("IterFor: " + format( d ) +" K/s " );
+				
+			/* We iterate back on m. */
+			ms = System.currentTimeMillis();
+			for( ; it.hasPrevious(); it.PREV_KEY() );
+			d = 1.0 * n / (System.currentTimeMillis() - ms );
+			if ( j > 2 ) totIterBack += d; 				
+			System.out.print("IterBack: " + format( d ) +" K/s " );
+				
+			System.out.println();
+		}
+
+
+		System.out.println();
+		System.out.println( "fastutil  Add: " + format( totAdd/(j-3) ) + " K/s RemYes: " + format( totRemYes/(j-3) ) + " K/s Yes: " + format( totYes/(j-3) ) + " K/s No: " + format( totNo/(j-3) ) + " K/s IterFor: " + format( totIterFor/(j-3) )  + " K/s IterBack: " + format( totIterBack/(j-3) ) + "K/s"  );
+
+		System.out.println();
+	}
+
+
+	private static boolean valEquals(Object o1, Object o2) {
+		return o1 == null ? o2 == null : o1.equals(o2);
+	}
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	private static Object[] k, v, nk;
+	private static KEY_TYPE kt[];
+	private static KEY_TYPE nkt[];
+	private static RB_TREE_SET topSet;
+
+	protected static void testSets( SORTED_SET m, SortedSet t, int n, int level ) {
+		long ms;
+		boolean mThrowsIllegal, tThrowsIllegal, mThrowsNoElement, tThrowsNoElement;
+		boolean rt = false, rm = false;
+
+		if ( level > 4 ) return;
+				
+
+		/* Now we check that both sets agree on first/last keys. */
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.first();
+		}
+		catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.first();
+		}
+		catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): first() divergence at start in NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+		if ( ! mThrowsNoElement ) ensure( t.first().equals( m.first() ), "Error (" + level + ", " + seed + "): m and t differ at start on their first key (" + m.first() + ", " + t.first() +")" );
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.last();
+		}
+		catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.last();
+		}
+		catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): last() divergence at start in NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+
+
+		if ( ! mThrowsNoElement ) ensure( t.last().equals( m.last() ), "Error (" + level + ", " + seed + "): m and t differ at start on their last key (" + m.last() + ", " + t.last() +")");
+
+
+		/* Now we check that m and t are equal. */
+		if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t);
+
+		ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" );
+		ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" );
+
+
+
+		/* Now we check that m actually holds that data. */
+		for(Iterator i=t.iterator(); i.hasNext();  ) {
+			ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on t)" );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for(Iterator i=m.iterator(); i.hasNext();  ) {
+			ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on m)" );
+		}
+
+		/* Now we check that inquiries about random data give the same answer in m and t. For
+		   m we use the polymorphic method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+				
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+
+			try {
+				m.contains(T);
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+
+			try {
+				t.contains(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): contains() divergence in NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): contains() divergence in IllegalArgumentException (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			if ( !mThrowsNoElement && !mThrowsIllegal ) ensure( m.contains(KEY2OBJ(T)) == t.contains(KEY2OBJ(T)), "Error (" + level + ", " + seed + "): divergence in keys between t and m (polymorphic method)" );
+		}
+
+		/* Again, we check that inquiries about random data give the same answer in m and t, but
+		   for m we use the standard method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+
+			try {
+				m.contains(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+
+			try {
+				t.contains(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): contains() divergence in NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): contains() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			if ( !mThrowsNoElement && !mThrowsIllegal ) ensure( m.contains(KEY2OBJ(T)) ==  t.contains(KEY2OBJ(T)), "Error (" + level + ", " + seed + "): divergence between t and m (standard method)" );
+		}
+
+		/* Now we add and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+
+			try {
+				rm = m.add(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+
+			try {
+				rt = t.add(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): add() divergence in NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): add() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			if ( !mThrowsNoElement && !mThrowsIllegal ) ensure( rm == rt, "Error (" + level + ", " + seed + "): divergence in add() between t and m" );
+
+			T = genKey();
+
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+
+			try {
+				rm = m.remove(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+
+			try {
+				rt = t.remove(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): remove() divergence in NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): remove() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			if ( !mThrowsNoElement && !mThrowsIllegal ) ensure( rm == rt, "Error (" + level + ", " + seed + "): divergence in remove() between t and m" );
+		}
+
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after removal" );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after removal" );
+
+		/* Now we check that m actually holds the same data. */
+		  
+		for(Iterator i=t.iterator(); i.hasNext();  ) {
+			ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after removal (iterating on t)");
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(Iterator i=m.iterator(); i.hasNext();  ) {
+			ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after removal (iterating on m)" );
+		}
+
+		/* Now we check that both sets agree on first/last keys. */
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.first();
+		}
+		catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.first();
+		}
+		catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): first() divergence in NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+		if ( ! mThrowsNoElement ) ensure( t.first().equals( m.first() ), "Error (" + level + ", " + seed + "): m and t differ on their first key (" + m.first() + ", " + t.first() +")" );
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.last();
+		}
+		catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.last();
+		}
+		catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): last() divergence in NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+
+		if ( ! mThrowsNoElement ) ensure( t.last().equals( m.last() ), "Error (" + level + ", " + seed + "): m and t differ on their last key (" + m.last() + ", " + t.last() +")");
+
+		/* Now we check cloning. */
+
+		if ( level == 0 ) {
+			ensure( m.equals( ((RB_TREE_SET)m).clone() ), "Error (" + level + ", " + seed + "): m does not equal m.clone()" );
+			ensure( ((RB_TREE_SET)m).clone().equals( m ), "Error (" + level + ", " + seed + "): m.clone() does not equal m" );
+			m = (RB_TREE_SET)((RB_TREE_SET)m).clone();
+		}
+
+		/* Now we play with constructors. */
+		ensure( m.equals( new RB_TREE_SET( (Collection)m ) ), "Error (" + level + ", " + seed + "): m does not equal new ( Collection m )" );
+		ensure( ( new RB_TREE_SET( (Collection)m ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( Collection m ) does not equal m" );
+		ensure( m.equals( new RB_TREE_SET( (COLLECTION)m ) ), "Error (" + level + ", " + seed + "): m does not equal new ( type-specific Collection m )" );
+		ensure( ( new RB_TREE_SET( (COLLECTION)m ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( type-specific Collection m ) does not equal m" );
+		ensure( m.equals( new RB_TREE_SET( (SortedSet)m ) ), "Error (" + level + ", " + seed + "): m does not equal new ( SortedSet m )" );
+		ensure( ( new RB_TREE_SET( (SortedSet)m ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( SortedSet m ) does not equal m" );
+		ensure( m.equals( new RB_TREE_SET( (SORTED_SET)m ) ), "Error (" + level + ", " + seed + "): m does not equal new ( type-specific SortedSet m )" );
+		ensure( ( new RB_TREE_SET( (SORTED_SET)m ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( type-specific SortedSet m ) does not equal m" );
+		ensure( m.equals( new RB_TREE_SET( m.iterator() ) ), "Error (" + level + ", " + seed + "): m does not equal new ( m.iterator() )" );
+		ensure( ( new RB_TREE_SET( m.iterator() ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( m.iterator() ) does not equal m" );
+		ensure( m.equals( new RB_TREE_SET( m.iterator() ) ), "Error (" + level + ", " + seed + "): m does not equal new ( m.type_specific_iterator() )" );
+		ensure( ( new RB_TREE_SET( m.iterator() ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( m.type_specific_iterator() ) does not equal m" );
+
+		/* Now we play with conversion to array, wrapping and copying. */
+		ensure( m.equals( new RB_TREE_SET( m.TO_KEY_ARRAY() ) ), "Error (" + level + ", " + seed + "): m does not equal new ( toArray( m ) )" );
+		ensure( ( new RB_TREE_SET( m.TO_KEY_ARRAY() ) ).equals( m ), "Error (" + level + ", " + seed + "): new ( toArray( m ) ) does not equal m" );
+
+
+
+		int h = m.hashCode();
+
+
+		/* Now we save and read m. */
+
+		SORTED_SET m2 = null;
+		  
+		try {
+			java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test");
+			java.io.OutputStream os = new java.io.FileOutputStream(ff);
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os);
+				
+			oos.writeObject(m);
+			oos.close();
+				
+			java.io.InputStream is = new java.io.FileInputStream(ff);
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is);
+				
+			m2 = (SORTED_SET)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch(Exception e) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+
+		ensure( m2.hashCode() == h, "Error (" + level + ", " + seed + "): hashCode() changed after save/read" );
+		  
+		/* Now we check that m2 actually holds that data. */
+		  
+		ensure( m2.equals(t), "Error (" + level + ", " + seed + "): ! m2.equals( t ) after save/read" );
+		ensure( t.equals(m2), "Error (" + level + ", " + seed + "): ! t.equals( m2 ) after save/read" );
+
+		/* Now we take out of m everything, and check that it is empty. */
+
+		for(Iterator i=t.iterator(); i.hasNext(); ) m2.remove(i.next());
+
+		ensure( m2.isEmpty(), "Error (" + level + ", " + seed + "): m2 is not empty (as it should be)" );
+				 
+		/* Now we play with iterators. */
+
+		{
+			java.util.ListIterator i, j;
+			Object J;
+			i = (java.util.ListIterator)m.iterator(); 
+			j = new java.util.LinkedList( t ).listIterator(); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext()" );
+				ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + level + ", " + seed + "): divergence in hasPrevious()" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					ensure( i.next().equals( J = j.next() ), "Error (" + level + ", " + seed + "): divergence in next()" );
+
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					ensure( i.previous().equals( J = j.previous() ), "Error (" + level + ", " + seed + "): divergence in previous()" );
+
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+
+				ensure( i.nextIndex() == j.nextIndex(), "Error (" + level + ", " + seed + "): divergence in nextIndex()" );
+				ensure( i.previousIndex() == j.previousIndex(), "Error (" + level + ", " + seed + "): divergence in previousIndex()" );
+
+			}
+
+		}
+
+		{
+			boolean badPrevious = false;
+			Object previous = null;
+			it.unimi.dsi.fastutil.BidirectionalIterator i;
+			java.util.ListIterator j;
+			Object I, J;
+			KEY_TYPE from = genKey();
+			j = new java.util.LinkedList( t ).listIterator(); 
+			while( j.hasNext() ) {
+				Object k = j.next();
+				if ( ((Comparable)k).compareTo( KEY2OBJ( from ) ) > 0 ) {
+					badPrevious = true;
+					j.previous();
+					break;
+				}
+				previous = k;
+			}
+
+			i = (it.unimi.dsi.fastutil.BidirectionalIterator)m.iterator( from ); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" );
+				ensure( i.hasPrevious() == j.hasPrevious() || badPrevious && ( i.hasPrevious() == ( previous != null ) ), "Error (" + level + ", " + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" + badPrevious );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					ensure( ( I = i.next() ).equals( J = j.next() ), "Error (" + level + ", " + seed + "): divergence in next() (" + I + ", " + J + ", iterator with starting point " + from + ")" );
+					//System.err.println("Done next " + I + " " + J + "  " + badPrevious);
+
+					badPrevious = false;
+
+					if ( r.nextFloat() < 0.5 ) {
+						//System.err.println("Removing in next");
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+				else if ( !badPrevious && r.nextFloat() < .2 && i.hasPrevious() ) {
+					ensure( ( I = i.previous() ).equals( J = j.previous() ), "Error (" + level + ", " + seed + "): divergence in previous() (" + I + ", " + J + ", iterator with starting point " + from + ")" );
+
+					if ( r.nextFloat() < 0.5 ) {
+						//System.err.println("Removing in prev");
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+			}
+
+		}
+
+		/* Now we check that m actually holds that data. */
+		  
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after iteration" );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after iteration" );
+
+		/* Now we select a pair of keys and create a subset. */
+
+		if ( ! m.isEmpty() ) {
+			java.util.ListIterator i;
+			Object start = m.first(), end = m.first();
+			for( i = (java.util.ListIterator)m.iterator(); i.hasNext() && r.nextFloat() < .3; start = end = i.next() );
+			for( ; i.hasNext() && r.nextFloat() < .95; end = i.next() );
+
+			//System.err.println("Checking subSet from " + start + " to " + end + " (level=" + (level+1) + ")..." );
+			testSets( (SORTED_SET)m.subSet( (KEY_CLASS)start, (KEY_CLASS)end ), t.subSet( start, end ), n, level + 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after subSet" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after subSet" );
+
+			//System.err.println("Checking headSet to " + end + " (level=" + (level+1) + ")..." );
+			testSets( (SORTED_SET)m.headSet( (KEY_CLASS)end ), t.headSet( end ), n, level + 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after headSet" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after headSet" );
+
+			//System.err.println("Checking tailSet from " + start + " (level=" + (level+1) + ")..." );
+			testSets( (SORTED_SET)m.tailSet( (KEY_CLASS)start ), t.tailSet( start ), n, level + 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after tailSet" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after tailSet" );
+		}
+		  
+
+	}
+
+
+	private static void test( int n ) {
+		RB_TREE_SET m = new RB_TREE_SET();
+		SortedSet t = new java.util.TreeSet();
+		topSet = m;
+		k = new Object[n];
+		nk = new Object[n];
+		kt = new KEY_TYPE[n];
+		nkt = new KEY_TYPE[n];
+
+		for( int i = 0; i < n; i++ ) {
+#if #keyclass(Object)
+			k[i] = kt[i] = genKey();
+			nk[i] = nkt[i] = genKey();
+#else
+			k[i] = new KEY_CLASS( kt[i] = genKey() );
+			nk[i] = new KEY_CLASS( nkt[i] = genKey() );
+#endif
+		}
+		  
+		/* We add pairs to t. */
+		for( int i = 0; i < n;  i++ ) t.add( k[i] );
+		  
+		/* We add to m the same data */
+		m.addAll(t);
+
+		testSets( m, t, n, 0 );
+
+		System.out.println("Test OK");
+		return;
+	}
+
+
+	public static void main( String args[] ) {
+		int n  = Integer.parseInt(args[1]);
+		if ( args.length > 2 ) r = new java.util.Random( seed = Long.parseLong( args[ 2 ] ) );
+		  
+		try {
+			if ("speedTest".equals(args[0]) || "speedComp".equals(args[0])) speedTest( n, "speedComp".equals(args[0]) );
+			else if ( "test".equals( args[0] ) ) test(n);
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}	
+	}
+
+#endif
+
+
+}
diff --git a/drv/SemiIndirectHeaps.drv b/drv/SemiIndirectHeaps.drv
new file mode 100644
index 0000000..e7c4fe7
--- /dev/null
+++ b/drv/SemiIndirectHeaps.drv
@@ -0,0 +1,243 @@
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+#if #keyclass(Object)
+import java.util.Comparator;
+#endif
+
+import it.unimi.dsi.fastutil.ints.IntArrays;
+
+/** A class providing static methods and objects that do useful things with semi-indirect heaps.
+ *
+ * <P>A semi-indirect heap is based on a <em>reference array</em>. Elements of
+ * a semi-indirect heap are integers that index the reference array (note that
+ * in an <em>indirect</em> heap you can also map elements of the reference
+ * array to heap positions).  
+ */
+
+public class SEMI_INDIRECT_HEAPS {
+
+	private SEMI_INDIRECT_HEAPS() {}
+
+	/** Moves the given element down into the semi-indirect heap until it reaches the lowest possible position.
+	 *
+	 * @param refArray the reference array.
+	 * @param heap the semi-indirect heap (starting at 0).
+	 * @param size the number of elements in the heap.
+	 * @param i the index in the heap of the element to be moved down.
+	 * @param c a type-specific comparator, or <code>null</code> for the natural order.
+	 * @return the new position in the heap of the element of heap index <code>i</code>.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public static KEY_GENERIC int downHeap( final KEY_GENERIC_TYPE[] refArray, final int[] heap, final int size, int i, final KEY_COMPARATOR KEY_GENERIC c ) {
+		if ( i >= size ) throw new IllegalArgumentException( "Heap position (" + i + ") is larger than or equal to heap size (" + size + ")" );
+
+		final int e = heap[ i ];
+		final KEY_GENERIC_TYPE E = refArray[ e ];
+		int child;
+
+		if ( c == null )
+			while ( ( child = 2 * i + 1 ) < size ) {
+				if ( child + 1 < size && KEY_LESS( refArray[ heap[ child + 1 ] ], refArray[ heap[ child ] ] ) ) child++;
+				if ( KEY_LESSEQ( E, refArray[ heap[ child ] ] ) ) break;
+				heap[ i ] = heap[ child ];
+				i = child;
+			}
+		else 
+			while ( ( child = 2 * i + 1 ) < size ) {
+				if ( child + 1 < size && c.compare( refArray[ heap[ child + 1 ] ], refArray[ heap[ child ] ] ) < 0 ) child++;
+				if ( c.compare( E, refArray[ heap[ child ] ] ) <= 0 ) break;
+				heap[ i ] = heap[ child ];
+				i = child;
+			}
+
+		heap[ i ] = e;
+
+		return i;
+	}
+
+	/** Moves the given element up in the semi-indirect heap until it reaches the highest possible position.
+	 *
+	 * @param refArray the reference array.
+	 * @param heap the semi-indirect heap (starting at 0).
+	 * @param size the number of elements in the heap.
+	 * @param i the index in the heap of the element to be moved up.
+	 * @param c a type-specific comparator, or <code>null</code> for the natural order.
+	 * @return the new position in the heap of the element of heap index <code>i</code>.
+	 */
+
+	@SuppressWarnings("unchecked")
+	public static KEY_GENERIC int upHeap( final KEY_GENERIC_TYPE[] refArray, final int[] heap, final int size, int i, final KEY_COMPARATOR KEY_GENERIC c ) {
+		if ( i >= size ) throw new IllegalArgumentException( "Heap position (" + i + ") is larger than or equal to heap size (" + size + ")" );
+
+		final int e = heap[ i ];
+		int parent;
+		final KEY_GENERIC_TYPE E = refArray[ e ];
+
+		if ( c == null )
+			while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) {
+				if ( KEY_LESSEQ( refArray[ heap[ parent ] ], E ) ) break;
+				heap[ i ] = heap[ parent ]; 
+				i = parent;
+			}
+		else
+			while ( i != 0 && ( parent = ( i - 1 ) / 2 ) >= 0 ) {
+				if ( c.compare( refArray[ heap[ parent ] ], E ) <= 0 ) break;
+				heap[ i ] = heap[ parent ]; 
+				i = parent;
+			}
+
+		heap[ i ] = e;
+
+		return i;
+	}
+
+	/** Creates a semi-indirect heap in the given array.
+	 *
+	 * @param refArray the reference array.
+	 * @param offset the first element of the reference array to be put in the heap.
+	 * @param length the number of elements to be put in the heap.
+	 * @param heap the array where the heap is to be created.
+	 * @param c a type-specific comparator, or <code>null</code> for the natural order.
+	 */
+
+	public static KEY_GENERIC void makeHeap( final KEY_GENERIC_TYPE[] refArray, final int offset, final int length, final int[] heap, final KEY_COMPARATOR KEY_GENERIC c ) {
+		ARRAYS.ensureOffsetLength( refArray, offset, length );
+		if ( heap.length < length ) throw new IllegalArgumentException( "The heap length (" + heap.length + ") is smaller than the number of elements (" + length + ")" );
+
+		int i = length;
+		while( i-- != 0 ) heap[ i ] = offset + i;
+
+		i = length / 2;
+		while( i-- != 0 ) downHeap( refArray, heap, length, i, c );
+	}
+
+	/** Creates a semi-indirect heap, allocating its heap array.
+	 *
+	 * @param refArray the reference array.
+	 * @param offset the first element of the reference array to be put in the heap.
+	 * @param length the number of elements to be put in the heap.
+	 * @param c a type-specific comparator, or <code>null</code> for the natural order.
+	 * @return the heap array.
+	 */
+
+	public static KEY_GENERIC int[] makeHeap( final KEY_GENERIC_TYPE[] refArray, final int offset, final int length, final KEY_COMPARATOR KEY_GENERIC c ) {
+		int[] heap = length <= 0 ? IntArrays.EMPTY_ARRAY : new int[ length ];
+		makeHeap( refArray, offset, length, heap, c );
+		return heap;
+	}
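+
+	/* A minimal usage sketch (not a definitive reference): assuming the int instantiation
+	 * generated from this template, it.unimi.dsi.fastutil.ints.IntSemiIndirectHeaps, a
+	 * semi-indirect heap of indices into a reference array can be built and maintained as follows.
+	 *
+	 *     int[] refArray = { 5, 1, 4, 1, 3 };
+	 *     // Heap of the indices 0..4, ordered by the values they reference (null = natural order).
+	 *     int[] heap = IntSemiIndirectHeaps.makeHeap( refArray, 0, refArray.length, null );
+	 *     int top = heap[ 0 ];      // Index of a smallest element of refArray.
+	 *     refArray[ top ] = 10;     // After mutating the referenced value...
+	 *     IntSemiIndirectHeaps.downHeap( refArray, heap, refArray.length, 0, null ); // ...restore the invariant.
+	 */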
+
+
+
+	/** Creates a semi-indirect heap from a given index array.
+	 *
+	 * @param refArray the reference array.
+	 * @param heap an array containing indices into <code>refArray</code>.
+	 * @param size the number of elements in the heap.
+	 * @param c a type-specific comparator, or <code>null</code> for the natural order.
+	 */
+
+	public static KEY_GENERIC void makeHeap( final KEY_GENERIC_TYPE[] refArray, final int[] heap, final int size, final KEY_COMPARATOR KEY_GENERIC c ) {
+		int i = size / 2;
+		while( i-- != 0 ) downHeap( refArray, heap, size, i, c );
+	}
+
+	/** Retrieves the front of a heap in a given array.
+	 *
+	 * <p>The <em>front</em> of a semi-indirect heap is the set of indices whose associated elements in the reference array 
+	 * are equal to the element associated to the first index.
+	 *
+	 * <p>In several circumstances you need to know the front, and scanning linearly the entire heap is not
+	 * the best strategy. This method simulates (using a partial linear scan) a breadth-first visit that 
+	 * terminates when all visited nodes are larger than the element associated
+	 * to the top index, which implies that no elements of the front can be found later. 
+	 * In most cases this trick yields a significant improvement.
+	 * 
+	 * @param refArray the reference array.
+	 * @param heap an array containing indices into <code>refArray</code>.
+	 * @param size the number of elements in the heap.
+	 * @param a an array large enough to hold the front (e.g., at least as long as <code>refArray</code>).
+	 * @return the number of elements actually written (starting from the first position of <code>a</code>).
+	 */
+	@SuppressWarnings("unchecked")
+	public static KEY_GENERIC int front( final KEY_GENERIC_TYPE[] refArray, final int[] heap, final int size, final int[] a ) {
+		final KEY_GENERIC_TYPE top = refArray[ heap[ 0 ] ];
+		int j = 0, // The current position in a
+			l = 0, // The first position to visit in the next level (inclusive)
+			r = 1, // The last position to visit in the next level (exclusive)
+			f = 0; // The first position (in the heap array) of the next level
+		for( int i = 0; i < r; i++ ) {
+			if ( i == f ) { // New level
+				if ( l >= r ) break; // If we are crossing the two bounds, we're over
+				f = (f << 1) + 1; // Update the first position of the next level...
+				i = l; // ...and jump directly to position l
+				l = -1; // Invalidate l
+			}
+			if ( KEY_CMP_EQ( top, refArray[ heap[ i ] ] ) ) {
+				a[ j++ ] = heap[ i ];
+				if ( l == -1 ) l = i * 2 + 1; // If this is the first time in this level, set l
+				r = Math.min( size, i * 2 + 3 ); // Update r, but do not go beyond size
+			}
+		}
+
+		return j;
+	}
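+
+	/* A small sketch of the method above (assuming the generated int instantiation,
+	 * IntSemiIndirectHeaps): the front collects every index whose referenced value
+	 * equals the value referenced by the top index.
+	 *
+	 *     int[] refArray = { 2, 7, 2, 9, 2 };
+	 *     int[] heap = IntSemiIndirectHeaps.makeHeap( refArray, 0, refArray.length, null );
+	 *     int[] a = new int[ refArray.length ];
+	 *     int frontSize = IntSemiIndirectHeaps.front( refArray, heap, refArray.length, a );
+	 *     // frontSize == 3; a[ 0 ], a[ 1 ], a[ 2 ] are the indices 0, 2 and 4, in some order.
+	 */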
+
+	/** Retrieves the front of a heap in a given array using a given comparator.
+	 *
+	 * <p>The <em>front</em> of a semi-indirect heap is the set of indices whose associated elements in the reference array 
+	 * are equal to the element associated to the first index.
+	 *
+	 * <p>In several circumstances you need to know the front, and scanning linearly the entire heap is not
+	 * the best strategy. This method simulates (using a partial linear scan) a breadth-first visit that 
+	 * terminates when all visited nodes are larger than the element associated
+	 * to the top index, which implies that no elements of the front can be found later. 
+	 * In most cases this trick yields a significant improvement.
+	 * 
+	 * @param refArray the reference array.
+	 * @param heap an array containing indices into <code>refArray</code>.
+	 * @param size the number of elements in the heap.
+	 * @param a an array large enough to hold the front (e.g., at least as long as <code>refArray</code>).
+	 * @param c a type-specific comparator.
+	 * @return the number of elements actually written (starting from the first position of <code>a</code>).
+	 */
+	public static KEY_GENERIC int front( final KEY_GENERIC_TYPE[] refArray, final int[] heap, final int size, final int[] a, final KEY_COMPARATOR KEY_GENERIC c ) {
+		final KEY_GENERIC_TYPE top = refArray[ heap[ 0 ] ];
+		int j = 0, // The current position in a
+			l = 0, // The first position to visit in the next level (inclusive)
+			r = 1, // The last position to visit in the next level (exclusive)
+			f = 0; // The first position (in the heap array) of the next level
+		for( int i = 0; i < r; i++ ) {
+			if ( i == f ) { // New level
+				if ( l >= r ) break; // If we are crossing the two bounds, we're over
+				f = (f << 1) + 1; // Update the first position of the next level...
+				i = l; // ...and jump directly to position l
+				l = -1; // Invalidate l
+			}
+			if ( c.compare( top, refArray[ heap[ i ] ] ) == 0 ) {
+				a[ j++ ] = heap[ i ];
+				if ( l == -1 ) l = i * 2 + 1; // If this is the first time in this level, set l
+				r = Math.min( size, i * 2 + 3 ); // Update r, but do not go beyond size
+			}
+		}
+
+		return j;
+	}
+}
diff --git a/drv/Set.drv b/drv/Set.drv
new file mode 100644
index 0000000..0622a7a
--- /dev/null
+++ b/drv/Set.drv
@@ -0,0 +1,50 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.Set;
+
+/** A type-specific {@link Set}; provides some additional methods that use polymorphism to avoid (un)boxing. 
+ *
+ * <P>Additionally, this interface strengthens (again) {@link #iterator()}.
+ *
+ * @see Set
+ */
+
+public interface SET KEY_GENERIC extends COLLECTION KEY_GENERIC, Set<KEY_GENERIC_CLASS> {
+
+	/** Returns a type-specific iterator on the elements of this set.
+	 *
+	 * <p>Note that this specification strengthens the one given in {@link java.lang.Iterable#iterator()},
+	 * which was already strengthened in the corresponding type-specific class,
+	 * but was weakened by the fact that this interface extends {@link Set}.
+	 *
+	 * @return a type-specific iterator on the elements of this set.
+	 */
+	KEY_ITERATOR KEY_GENERIC iterator();
+
+	/** Removes an element from this set.
+	 *
+	 * <p>Note that the corresponding method of the type-specific collection is <code>rem()</code>.
+	 * This unfortunate situation is caused by the clash
+	 * with the similarly named index-based method in the {@link java.util.List} interface.
+	 *
+	 * @see java.util.Collection#remove(Object)
+	 */
+	public boolean remove( KEY_TYPE k );
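+
+	/* A minimal usage sketch (assuming the int instantiation generated from this template,
+	 * e.g. an IntSet backed by it.unimi.dsi.fastutil.ints.IntOpenHashSet):
+	 *
+	 *     IntSet s = new IntOpenHashSet( new int[] { 1, 2, 3 } );
+	 *     s.remove( 2 );                    // This interface's remove( KEY_TYPE ): no boxing.
+	 *     s.rem( 3 );                       // Collection-level synonym, kept because of the List.remove( int ) clash.
+	 *     s.remove( Integer.valueOf( 1 ) ); // The standard Set.remove( Object ) is still available.
+	 */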
+}
diff --git a/drv/Sets.drv b/drv/Sets.drv
new file mode 100644
index 0000000..392e922
--- /dev/null
+++ b/drv/Sets.drv
@@ -0,0 +1,515 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.Collection;
+import java.util.Set;
+
+/** A class providing static methods and objects that do useful things with type-specific sets.
+ *
+ * @see java.util.Collections
+ */
+
+public class SETS {
+
+	private SETS() {}
+
+	/** An immutable class representing the empty set and implementing a type-specific set interface.
+	 *
+	 * <P>This class may be useful to implement your own in case you subclass
+	 * a type-specific set.
+	 */
+
+	public static class EmptySet KEY_GENERIC extends COLLECTIONS.EmptyCollection KEY_GENERIC implements SET KEY_GENERIC, java.io.Serializable, Cloneable {
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected EmptySet() {}
+
+		public boolean remove( KEY_TYPE ok ) { throw new UnsupportedOperationException(); }
+		public Object clone() { return EMPTY_SET; }
+		private Object readResolve() { return EMPTY_SET; }
+	}
+
+
+	/** An empty set (immutable). It is serializable and cloneable.
+	 *
+	 * <P>The class of this object represents an abstract empty set
+	 * that is a subset of a (sorted) type-specific set.
+	 */
+	@SuppressWarnings("rawtypes")
+	public static final EmptySet EMPTY_SET = new EmptySet();
+
+
+
+	/** An immutable class representing a type-specific singleton set.
+	 *
+	 * <P>This class may be useful to implement your own in case you subclass
+	 * a type-specific set.
+	 */
+
+	public static class Singleton KEY_GENERIC extends ABSTRACT_SET KEY_GENERIC implements java.io.Serializable, Cloneable {
+	
+		private static final long serialVersionUID = -7046029254386353129L;
+	
+		protected final KEY_GENERIC_TYPE element;
+	
+		protected Singleton( final KEY_GENERIC_TYPE element ) {
+			this.element = element;
+		}
+	
+		public boolean add( final KEY_GENERIC_TYPE k ) { throw new UnsupportedOperationException(); }
+
+		public boolean contains( final KEY_TYPE k ) { return KEY_EQUALS( k, element ); }
+	
+		public boolean addAll( final Collection<? extends KEY_GENERIC_CLASS> c ) { throw new UnsupportedOperationException(); }
+		public boolean removeAll( final Collection<?> c ) { throw new UnsupportedOperationException(); }
+		public boolean retainAll( final Collection<?> c ) { throw new UnsupportedOperationException(); }
+
+#if #keys(primitive)
+		/* Slightly optimized w.r.t. the one in ABSTRACT_SET. */
+	
+		public KEY_TYPE[] TO_KEY_ARRAY() {
+			KEY_TYPE a[] = new KEY_TYPE[ 1 ];
+			a[ 0 ] = element;
+			return a;
+		}
+	
+		public boolean addAll( final COLLECTION c ) { throw new UnsupportedOperationException(); }
+		public boolean removeAll( final COLLECTION c ) { throw new UnsupportedOperationException(); }
+		public boolean retainAll( final COLLECTION c ) { throw new UnsupportedOperationException(); }
+#endif
+		@SuppressWarnings("unchecked")
+		public KEY_LIST_ITERATOR KEY_GENERIC iterator() { return ITERATORS.singleton( element ); }
+	
+		public int size() { return 1; }
+	
+		public Object clone() { return this; }
+	}
+
+#if ! #keyclass(Reference)
+
+	/** Returns a type-specific immutable set containing only the specified element. The returned set is serializable and cloneable.
+	 *
+	 * @param element the only element of the returned set.
+	 * @return a type-specific immutable set containing just <code>element</code>.
+	 */
+
+	public static KEY_GENERIC SET KEY_GENERIC singleton( final KEY_GENERIC_TYPE element ) {
+		return new Singleton KEY_GENERIC( element );
+	}
+
+#endif
+
+#if ! #keyclass(Object) 
+
+	/** Returns a type-specific immutable set containing only the specified element. The returned set is serializable and cloneable.
+	 *
+	 * @param element the only element of the returned set.
+	 * @return a type-specific immutable set containing just <code>element</code>.
+	 */
+
+	public static KEY_GENERIC SET KEY_GENERIC singleton( final KEY_GENERIC_CLASS element ) {
+		return new Singleton KEY_GENERIC( KEY_CLASS2TYPE( element ) );
+	}
+
+#endif
+
+	/** A synchronized wrapper class for sets. */
+
+	public static class SynchronizedSet KEY_GENERIC extends COLLECTIONS.SynchronizedCollection KEY_GENERIC implements SET KEY_GENERIC, java.io.Serializable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected SynchronizedSet( final SET KEY_GENERIC s, final Object sync ) {
+			super( s, sync );
+		}
+
+		protected SynchronizedSet( final SET KEY_GENERIC s ) {
+			super( s );
+		}
+
+		public boolean remove( final KEY_TYPE k ) { synchronized( sync ) { return collection.remove( KEY2OBJ( k ) ); } }
+		public boolean equals( final Object o ) { synchronized( sync ) { return collection.equals( o ); } }
+		public int hashCode() { synchronized( sync ) { return collection.hashCode(); } }
+	}
+
+
+	/** Returns a synchronized type-specific set backed by the given type-specific set.
+	 *
+	 * @param s the set to be wrapped in a synchronized set.
+	 * @return a synchronized view of the specified set.
+	 * @see java.util.Collections#synchronizedSet(Set)
+	 */
+	public static KEY_GENERIC SET KEY_GENERIC synchronize( final SET KEY_GENERIC s ) {	return new SynchronizedSet KEY_GENERIC( s ); }
+
+	/** Returns a synchronized type-specific set backed by the given type-specific set, using an assigned object to synchronize.
+	 *
+	 * @param s the set to be wrapped in a synchronized set.
+	 * @param sync an object that will be used to synchronize the access to the set.
+	 * @return a synchronized view of the specified set.
+	 * @see java.util.Collections#synchronizedSet(Set)
+	 */
+
+	public static KEY_GENERIC SET KEY_GENERIC synchronize( final SET KEY_GENERIC s, final Object sync ) { return new SynchronizedSet KEY_GENERIC( s, sync ); }
+
+
+
+	/** An unmodifiable wrapper class for sets. */
+
+	public static class UnmodifiableSet KEY_GENERIC extends COLLECTIONS.UnmodifiableCollection KEY_GENERIC implements SET KEY_GENERIC, java.io.Serializable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected UnmodifiableSet( final SET KEY_GENERIC s ) {
+			super( s );
+		}
+
+		public boolean remove( final KEY_TYPE k ) { throw new UnsupportedOperationException(); }
+		public boolean equals( final Object o ) { return collection.equals( o ); }
+		public int hashCode() { return collection.hashCode(); }
+	}
+
+
+	/** Returns an unmodifiable type-specific set backed by the given type-specific set.
+	 *
+	 * @param s the set to be wrapped in an unmodifiable set.
+	 * @return an unmodifiable view of the specified set.
+	 * @see java.util.Collections#unmodifiableSet(Set)
+	 */
+	public static KEY_GENERIC SET KEY_GENERIC unmodifiable( final SET KEY_GENERIC s ) {	return new UnmodifiableSet KEY_GENERIC( s ); }
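+
+	/* A brief usage sketch (assuming the generated int instantiation, IntSets, together with
+	 * IntOpenHashSet from the same package):
+	 *
+	 *     IntSet one    = IntSets.singleton( 42 );                     // Immutable one-element set.
+	 *     IntSet shared = IntSets.synchronize( new IntOpenHashSet() ); // Synchronized view.
+	 *     IntSet frozen = IntSets.unmodifiable( shared );              // Read-only view.
+	 */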
+
+
+
+#ifdef TEST
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif #keyclass(Object)
+		return Integer.toBinaryString( r.nextInt() );
+#else
+		return new java.io.Serializable() {};
+#endif
+	}
+
+
+	private static void test() {
+		int n = 100;
+		int c;
+		KEY_TYPE k = genKey();
+		Singleton m = new Singleton( k );
+		Set t = java.util.Collections.singleton( KEY2OBJ( k ) );
+
+		long ms;
+		boolean mThrowsIllegal, tThrowsIllegal, mThrowsNoElement, tThrowsNoElement, mThrowsIndex, tThrowsIndex, mThrowsUnsupp, tThrowsUnsupp;
+		boolean rt = false, rm = false;
+
+
+		/* Now we check that m and t are equal. */
+		if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t);
+
+		ensure( m.equals( t ), "Error (" + seed + "): ! m.equals( t ) at start" );
+		ensure( t.equals( m ), "Error (" + seed + "): ! t.equals( m ) at start" );
+
+		/* Now we check that m actually holds that data. */
+		for(java.util.Iterator i=t.iterator(); i.hasNext();  ) {
+			ensure( m.contains( i.next() ), "Error (" + seed + "): m and t differ on an entry after insertion (iterating on t)" );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for(java.util.Iterator i=m.iterator(); i.hasNext();  ) {
+			ensure( t.contains( i.next() ), "Error (" + seed + "): m and t differ on an entry after insertion (iterating on m)" );
+		}
+
+		/* Now we check that inquiries about random data give the same answer in m and t. For
+		   m we use the polymorphic method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+				
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				m.contains(T);
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+
+			try {
+				t.contains(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + seed + "): contains() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + seed + "): contains() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + seed + "): contains() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex) ensure( m.contains(KEY2OBJ(T)) == t.contains(KEY2OBJ(T)), "Error (" + seed + "): divergence in keys between t and m (polymorphic method) " + m );
+		}
+
+		/* Again, we check that inquiries about random data give the same answer in m and t, but
+		   for m we use the standard method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				m.contains(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				t.contains(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + seed + "): contains() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + seed + "): contains() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + seed + "): contains() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + seed + "): contains() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( m.contains(KEY2OBJ(T)) ==  t.contains(KEY2OBJ(T)), "Error (" + seed + "): divergence between t and m (standard method) " + m );
+		}
+
+		/* Now we add and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				rm = m.add(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.add(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + seed + "): add() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + seed + "): add() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + seed + "): add() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + seed + "): add() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( rm == rt, "Error (" + seed + "): divergence in add() between t and m " + m );
+
+			T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				rm = m.remove(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.remove(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			if ( ! KEY_EQUALS( T, k ) && mThrowsUnsupp && ! tThrowsUnsupp ) mThrowsUnsupp = false; // Stupid bug in Collections.singleton()
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + seed + "): remove() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + seed + "): remove() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + seed + "): remove() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + seed + "): remove() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( rm == rt, "Error (" + seed + "): divergence in remove() between t and m " + m );
+		}
+
+		ensure( m.equals(t), "Error (" + seed + "): ! m.equals( t ) after removal " + m );
+		ensure( t.equals(m), "Error (" + seed + "): ! t.equals( m ) after removal " + m );
+
+		/* Now we add and remove random collections in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				rm = m.addAll(java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.addAll(java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + seed + "): addAll() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + seed + "): addAll() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + seed + "): addAll() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + seed + "): addAll() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( rm == rt, "Error (" + seed + "): divergence in addAll() between t and m " + m );
+
+			T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				rm = m.removeAll(java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.removeAll(java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			if ( ! KEY_EQUALS( T, k ) && mThrowsUnsupp && ! tThrowsUnsupp ) mThrowsUnsupp = false; // Stupid bug in Collections.singleton()
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + seed + "): removeAll() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + seed + "): removeAll() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + seed + "): removeAll() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + seed + "): removeAll() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( rm == rt, "Error (" + seed + "): divergence in removeAll() between t and m " + m );
+		}
+
+		ensure( m.equals(t), "Error (" + seed + "): ! m.equals( t ) after set removal " + m );
+		ensure( t.equals(m), "Error (" + seed + "): ! t.equals( m ) after set removal " + m );
+
+		/* Now we check that m actually holds the same data. */
+		  
+		for(java.util.Iterator i=t.iterator(); i.hasNext();  ) {
+			ensure( m.contains( i.next() ), "Error (" + seed + "): m and t differ on an entry after removal (iterating on t)");
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.iterator(); i.hasNext();  ) {
+			ensure( t.contains( i.next() ), "Error (" + seed + "): m and t differ on an entry after removal (iterating on m)" );
+		}
+
+		if ( m instanceof Singleton ) {
+			ensure( m.equals( ((Singleton)m).clone() ), "Error (" + seed + "): m does not equal m.clone()" );
+			ensure( ((Singleton)m).clone().equals( m ), "Error (" + seed + "): m.clone() does not equal m" );
+		}
+
+		int h = m.hashCode();
+
+		/* Now we save and read m. */
+
+		SET m2 = null;
+		  
+		try {
+			java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test");
+			java.io.OutputStream os = new java.io.FileOutputStream(ff);
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os);
+				
+			oos.writeObject(m);
+			oos.close();
+				
+			java.io.InputStream is = new java.io.FileInputStream(ff);
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is);
+				
+			m2 = (SET)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch(Exception e) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+
+#if ! #keyclass(Reference)
+
+		ensure( m2.hashCode() == h, "Error (" + seed + "): hashCode() changed after save/read" );
+		  
+		/* Now we check that m2 actually holds that data. */
+		  
+		ensure( m2.equals(t), "Error (" + seed + "): ! m2.equals( t ) after save/read" );
+		ensure( t.equals(m2), "Error (" + seed + "): ! t.equals( m2 ) after save/read" );
+#endif
+
+		System.out.println("Test OK");
+		return;
+	}
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition fp = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, fp ).toString();
+	}
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	/** This method expects as first argument a lower-cased type (e.g., "int"),
+	 * and as second optional argument a seed. */
+
+	public static void main( String arg[] ) throws Exception {
+		if ( arg.length > 1 ) r = new java.util.Random( seed = Long.parseLong( arg[ 1 ] ) );
+		  
+		try {
+			test();
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+	
+#endif
+	
+}
diff --git a/drv/SortedMap.drv b/drv/SortedMap.drv
new file mode 100644
index 0000000..803b10b
--- /dev/null
+++ b/drv/SortedMap.drv
@@ -0,0 +1,157 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import VALUE_PACKAGE.VALUE_COLLECTION;
+
+import it.unimi.dsi.fastutil.objects.ObjectSortedSet;
+import it.unimi.dsi.fastutil.objects.ObjectBidirectionalIterator;
+
+import java.util.Map;
+import java.util.SortedMap;
+
+#if #keys(reference)
+import java.util.Comparator;
+#endif
+
+/** A type-specific {@link SortedMap}; provides some additional methods that use polymorphism to avoid (un)boxing.
+ *
+ * <P>Additionally, this interface strengthens {@link #entrySet()},
+ * {@link #keySet()}, {@link #values()},
+ * {@link #comparator()}, {@link SortedMap#subMap(Object,Object)}, {@link SortedMap#headMap(Object)} and {@link SortedMap#tailMap(Object)}.
+ *
+ * @see SortedMap
+ */
+
+public interface SORTED_MAP KEY_VALUE_GENERIC extends MAP KEY_VALUE_GENERIC, SortedMap<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS> {
+
+	/** A sorted entry set providing fast iteration.
+	 *
+	 * <p>In some cases (e.g., hash-based classes) iteration over an entry set requires the creation
+	 * of a large number of entry objects. Some <code>fastutil</code>
+	 * maps might return {@linkplain #entrySet() entry set} objects of type <code>FastSortedEntrySet</code>: in this case, {@link #fastIterator() fastIterator()}
+	 * will return an iterator that is guaranteed not to create a large number of objects, <em>possibly
+	 * by always returning the same entry</em> (of course, mutated).
+	 */
+	public interface FastSortedEntrySet KEY_VALUE_GENERIC extends ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC>, FastEntrySet KEY_VALUE_GENERIC {
+		/** Returns a fast iterator over this sorted entry set; the iterator might always return the same entry object, suitably mutated.
+		 *
+		 * @return a fast iterator over this sorted entry set; the iterator might always return the same entry object, suitably mutated.
+		 */
+		public ObjectBidirectionalIterator<MAP.Entry KEY_VALUE_GENERIC> fastIterator( MAP.Entry KEY_VALUE_GENERIC from );
+	}
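+
+	/* A usage sketch (assuming the generated Int2Int instantiation): when the entry set of a
+	 * sorted map exposes the fast view described above, iteration can avoid creating one entry
+	 * object per element; otherwise the ordinary iterator is used.
+	 *
+	 *     Int2IntSortedMap m = new Int2IntRBTreeMap();
+	 *     ObjectSortedSet<Int2IntMap.Entry> es = m.int2IntEntrySet();
+	 *     ObjectIterator<Int2IntMap.Entry> i = es instanceof Int2IntSortedMap.FastSortedEntrySet
+	 *         ? ((Int2IntSortedMap.FastSortedEntrySet)es).fastIterator() : es.iterator();
+	 *     while( i.hasNext() ) i.next().getIntValue(); // The returned entry may be reused across calls.
+	 */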
+
+	/** Returns a sorted-set view of the mappings contained in this map.
+	 *  Note that this specification strengthens the one given in the
+	 *  corresponding type-specific unsorted map.
+	 *
+	 * @return a sorted-set view of the mappings contained in this map.
+	 * @see Map#entrySet()
+	 */
+
+	ObjectSortedSet<Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>> entrySet();
+
+	/** Returns a type-specific sorted-set view of the mappings contained in this map.
+	 * Note that this specification strengthens the one given in the
+	 * corresponding type-specific unsorted map.
+	 *
+	 * @return a type-specific sorted-set view of the mappings contained in this map.
+	 * @see #entrySet()
+	 */
+
+	ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> ENTRYSET();
+
+	/** Returns a sorted-set view of the keys contained in this map.
+	 *  Note that this specification strengthens the one given in the
+	 *  corresponding type-specific unsorted map.
+	 *
+	 * @return a sorted-set view of the keys contained in this map.
+	 * @see Map#keySet()
+	 */
+
+	SORTED_SET KEY_GENERIC keySet();
+
+	/** Returns a set view of the values contained in this map.
+	 * <P>Note that this specification strengthens the one given in {@link Map#values()},
+	 * which was already strengthened in the corresponding type-specific class,
+	 * but was weakened by the fact that this interface extends {@link SortedMap}.
+	 *
+	 * @return a set view of the values contained in this map.
+	 * @see Map#values()
+	 */
+
+	VALUE_COLLECTION VALUE_GENERIC values();
+	/** Returns the comparator associated with this sorted map, or <code>null</code> if it uses its keys' natural ordering.
+	 *
+	 *  <P>Note that this specification strengthens the one given in {@link SortedMap#comparator()}.
+	 *
+	 * @see SortedMap#comparator()
+	 */
+	KEY_COMPARATOR KEY_SUPER_GENERIC comparator();
+
+	/** Returns a view of the portion of this sorted map whose keys range from <code>fromKey</code>, inclusive, to <code>toKey</code>, exclusive.
+	 *
+	 *  <P>Note that this specification strengthens the one given in {@link SortedMap#subMap(Object,Object)}.
+	 *
+	 * @see SortedMap#subMap(Object,Object)
+	 */
+	SORTED_MAP KEY_VALUE_GENERIC subMap(KEY_GENERIC_CLASS fromKey, KEY_GENERIC_CLASS toKey);
+
+	/** Returns a view of the portion of this sorted map whose keys are strictly less than <code>toKey</code>.
+	 *
+	 *  <P>Note that this specification strengthens the one given in {@link SortedMap#headMap(Object)}.
+	 *
+	 * @see SortedMap#headMap(Object)
+	 */
+	SORTED_MAP KEY_VALUE_GENERIC headMap(KEY_GENERIC_CLASS toKey);
+
+	/** Returns a view of the portion of this sorted map whose keys are greater than or equal to <code>fromKey</code>.
+	 *
+	 *  <P>Note that this specification strengthens the one given in {@link SortedMap#tailMap(Object)}.
+	 *
+	 * @see SortedMap#tailMap(Object)
+	 */
+	SORTED_MAP KEY_VALUE_GENERIC tailMap(KEY_GENERIC_CLASS fromKey);
+
+#if #keys(primitive)
+	/**  Returns a view of the portion of this sorted map whose keys range from <code>fromKey</code>, inclusive, to <code>toKey</code>, exclusive.
+	 * @see SortedMap#subMap(Object,Object)
+	 */
+	SORTED_MAP KEY_VALUE_GENERIC subMap(KEY_TYPE fromKey, KEY_TYPE toKey);
+
+	/** Returns a view of the portion of this sorted map whose keys are strictly less than <code>toKey</code>.
+	 * @see SortedMap#headMap(Object)
+	 */
+	SORTED_MAP KEY_VALUE_GENERIC headMap(KEY_TYPE toKey);
+
+	/** Returns a view of the portion of this sorted map whose keys are greater than or equal to <code>fromKey</code>.
+	 * @see SortedMap#tailMap(Object)
+	 */
+	SORTED_MAP KEY_VALUE_GENERIC tailMap(KEY_TYPE fromKey);
+
+	/**
+	 * @see SortedMap#firstKey()
+	 */
+	KEY_TYPE FIRST_KEY();
+
+	/**
+	 * @see SortedMap#lastKey()
+	 */
+	KEY_TYPE LAST_KEY();
+#endif
+}
diff --git a/drv/SortedMaps.drv b/drv/SortedMaps.drv
new file mode 100644
index 0000000..5682c60
--- /dev/null
+++ b/drv/SortedMaps.drv
@@ -0,0 +1,887 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.objects.ObjectSortedSet;
+import it.unimi.dsi.fastutil.objects.ObjectSortedSets;
+
+import java.util.Comparator;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.NoSuchElementException;
+
+/** A class providing static methods and objects that do useful things with type-specific sorted maps.
+ *
+ * @see java.util.Collections
+ */
+
+public class SORTED_MAPS {
+
+	private SORTED_MAPS() {}
+
+	/** Returns a comparator for entries based on a given comparator on keys.
+	 *
+	 * @param comparator a comparator on keys.
+	 * @return the associated comparator on entries.
+	 */
+	public static KEY_GENERIC Comparator<? super Map.Entry<KEY_GENERIC_CLASS, ?>> entryComparator( final KEY_COMPARATOR KEY_GENERIC comparator ) {
+		return new Comparator<Map.Entry<KEY_GENERIC_CLASS, ?>>() {
+			public int compare( Map.Entry<KEY_GENERIC_CLASS, ?> x, Map.Entry<KEY_GENERIC_CLASS, ?> y ) {
+				return comparator.compare( x.getKey(), y.getKey() );
+			}
+		};
+	}
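+
+	/* A usage sketch, assuming the int/int specialization Int2IntSortedMaps generated from this
+	 * template and the IntComparators.OPPOSITE_COMPARATOR constant from the same package
+	 * (it.unimi.dsi.fastutil.ints):
+	 *
+	 *   Comparator<? super Map.Entry<Integer, ?>> byDescendingKey =
+	 *       Int2IntSortedMaps.entryComparator( IntComparators.OPPOSITE_COMPARATOR );
+	 *   // byDescendingKey orders entries by decreasing key, delegating to the key comparator only.
+	 */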
+
+
+	/** An immutable class representing an empty type-specific sorted map. 
+	 *
+	 * <P>This class may be useful to implement your own in case you subclass
+	 * a type-specific sorted map.
+	 */
+
+	public static class EmptySortedMap KEY_VALUE_GENERIC extends MAPS.EmptyMap KEY_VALUE_GENERIC implements SORTED_MAP KEY_VALUE_GENERIC, java.io.Serializable, Cloneable {
+		
+		private static final long serialVersionUID = -7046029254386353129L;
+		
+		protected EmptySortedMap() {}
+		
+		public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return null; } 
+
+		@SuppressWarnings("unchecked")
+		public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> ENTRYSET() { return ObjectSortedSets.EMPTY_SET; }
+		@SuppressWarnings("unchecked")
+		public ObjectSortedSet<Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>> entrySet() { return ObjectSortedSets.EMPTY_SET; }
+
+		@SuppressWarnings("unchecked")
+		public SORTED_SET KEY_GENERIC keySet() { return SORTED_SETS.EMPTY_SET; }
+		
+		@SuppressWarnings("unchecked")
+		public SORTED_MAP KEY_VALUE_GENERIC subMap( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) { return EMPTY_MAP; }
+
+		@SuppressWarnings("unchecked")
+		public SORTED_MAP KEY_VALUE_GENERIC headMap( final KEY_GENERIC_TYPE to ) { return EMPTY_MAP; }
+
+		@SuppressWarnings("unchecked")
+		public SORTED_MAP KEY_VALUE_GENERIC tailMap( final KEY_GENERIC_TYPE from ) { return EMPTY_MAP; }
+
+		public KEY_GENERIC_TYPE FIRST_KEY() { throw new NoSuchElementException(); }
+		public KEY_GENERIC_TYPE LAST_KEY() { throw new NoSuchElementException(); }
+
+#if #keys(primitive)
+		public SORTED_MAP KEY_VALUE_GENERIC headMap( KEY_GENERIC_CLASS oto ) { return headMap( KEY_CLASS2TYPE( oto ) ); }
+		public SORTED_MAP KEY_VALUE_GENERIC tailMap( KEY_GENERIC_CLASS ofrom ) { return tailMap( KEY_CLASS2TYPE( ofrom ) ); }
+		public SORTED_MAP KEY_VALUE_GENERIC subMap( KEY_GENERIC_CLASS ofrom, KEY_GENERIC_CLASS oto ) { return subMap( KEY_CLASS2TYPE( ofrom ), KEY_CLASS2TYPE( oto ) ); }
+
+		public KEY_GENERIC_CLASS firstKey() { return KEY2OBJ( FIRST_KEY() ); }
+		public KEY_GENERIC_CLASS lastKey() { return KEY2OBJ( LAST_KEY() ); }
+#endif
+
+	}
+
+
+
+	/** An empty type-specific sorted map (immutable). It is serializable and cloneable. */
+	 
+	@SuppressWarnings("rawtypes")
+	public static final EmptySortedMap EMPTY_MAP = new EmptySortedMap();
+
+
+	/** An immutable class representing a type-specific singleton sorted map. 
+	 *
+	 * <P>This class may be useful to implement your own in case you subclass
+	 * a type-specific sorted map.
+	 */
+
+	public static class Singleton KEY_VALUE_GENERIC extends MAPS.Singleton KEY_VALUE_GENERIC implements SORTED_MAP KEY_VALUE_GENERIC, java.io.Serializable, Cloneable {
+	
+		private static final long serialVersionUID = -7046029254386353129L;
+	
+		protected final KEY_COMPARATOR KEY_SUPER_GENERIC comparator;
+
+		protected Singleton( final KEY_GENERIC_TYPE key, final VALUE_GENERIC_TYPE value, KEY_COMPARATOR KEY_SUPER_GENERIC comparator ) {
+			super( key, value );
+			this.comparator = comparator;
+		}
+
+		protected Singleton( final KEY_GENERIC_TYPE key, final VALUE_GENERIC_TYPE value ) {
+			this( key, value, null );
+		}
+
+		@SuppressWarnings("unchecked")
+		final int compare( final KEY_GENERIC_TYPE k1, final KEY_GENERIC_TYPE k2 ) {
+			return comparator == null ? KEY_CMP( k1, k2 ) : comparator.compare( k1, k2 );
+		}
+
+		public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return comparator; } 
+
+		@SuppressWarnings("unchecked")
+		public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> ENTRYSET() { if ( entries == null ) entries = ObjectSortedSets.singleton( (MAP.Entry KEY_VALUE_GENERIC)new SingletonEntry(), (Comparator<? super MAP.Entry KEY_VALUE_GENERIC>)entryComparator( comparator ) ); return (ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC>)entries; }
+		@SuppressWarnings({ "rawtypes", "unchecked" })
+		public ObjectSortedSet<Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>> entrySet() { return (ObjectSortedSet)ENTRYSET(); }
+
+		public SORTED_SET KEY_GENERIC keySet() { if ( keys == null ) keys = SORTED_SETS.singleton( key, comparator ); return (SORTED_SET KEY_GENERIC)keys; }
+
+		@SuppressWarnings("unchecked")
+		public SORTED_MAP KEY_VALUE_GENERIC subMap( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) { if ( compare( from, key ) <= 0 && compare( key, to ) < 0 ) return this; return EMPTY_MAP; }
+
+		@SuppressWarnings("unchecked")
+		public SORTED_MAP KEY_VALUE_GENERIC headMap( final KEY_GENERIC_TYPE to ) { if ( compare( key, to ) < 0 ) return this; return EMPTY_MAP; }
+
+		@SuppressWarnings("unchecked")
+		public SORTED_MAP KEY_VALUE_GENERIC tailMap( final KEY_GENERIC_TYPE from ) { if ( compare( from, key ) <= 0 ) return this; return EMPTY_MAP; }
+
+		public KEY_GENERIC_TYPE FIRST_KEY() { return key; }
+		public KEY_GENERIC_TYPE LAST_KEY() { return key; }
+
+#if #keys(primitive)
+		public SORTED_MAP KEY_VALUE_GENERIC headMap( KEY_GENERIC_CLASS oto ) { return headMap( KEY_CLASS2TYPE( oto ) ); }
+		public SORTED_MAP KEY_VALUE_GENERIC tailMap( KEY_GENERIC_CLASS ofrom ) { return tailMap( KEY_CLASS2TYPE( ofrom ) ); }
+		public SORTED_MAP KEY_VALUE_GENERIC subMap( KEY_GENERIC_CLASS ofrom, KEY_GENERIC_CLASS oto ) { return subMap( KEY_CLASS2TYPE( ofrom ), KEY_CLASS2TYPE( oto ) ); }
+
+		public KEY_GENERIC_CLASS firstKey() { return KEY2OBJ( FIRST_KEY() ); }
+		public KEY_GENERIC_CLASS lastKey() { return KEY2OBJ( LAST_KEY() ); }
+#endif
+	}
+
+	/** Returns a type-specific immutable sorted map containing only the specified pair. The returned sorted map is serializable and cloneable.
+	 *
+	 * <P>Note that although the returned map is immutable, its default return value may be changed.
+	 *
+	 * @param key the only key of the returned sorted map.
+	 * @param value the only value of the returned sorted map.
+	 * @return a type-specific immutable sorted map containing just the pair <code>&lt;key,value&gt;</code>.
+	 */
+
+	public static KEY_VALUE_GENERIC SORTED_MAP KEY_VALUE_GENERIC singleton( final KEY_GENERIC_CLASS key, VALUE_GENERIC_CLASS value ) {
+		return new Singleton KEY_VALUE_GENERIC( KEY_CLASS2TYPE( key ), VALUE_CLASS2TYPE( value ) );
+	}
+
+	/** Returns a type-specific immutable sorted map containing only the specified pair, using a specified comparator. The returned sorted map is serializable and cloneable.
+	 *
+	 * <P>Note that although the returned map is immutable, its default return value may be changed.
+	 *
+	 * @param key the only key of the returned sorted map.
+	 * @param value the only value of the returned sorted map.
+	 * @param comparator the comparator to use in the returned sorted map.
+	 * @return a type-specific immutable sorted map containing just the pair <code>&lt;key,value&gt;</code>.
+	 */
+
+	public static KEY_VALUE_GENERIC SORTED_MAP KEY_VALUE_GENERIC singleton( final KEY_GENERIC_CLASS key, VALUE_GENERIC_CLASS value, KEY_COMPARATOR KEY_SUPER_GENERIC comparator ) {
+		return new Singleton KEY_VALUE_GENERIC( KEY_CLASS2TYPE( key ), VALUE_CLASS2TYPE( value ), comparator );
+	}
+
+#if #keys(primitive) || #values(primitive)
+
+	/** Returns a type-specific immutable sorted map containing only the specified pair. The returned sorted map is serializable and cloneable.
+	 *
+	 * <P>Note that although the returned map is immutable, its default return value may be changed.
+	 *
+	 * @param key the only key of the returned sorted map.
+	 * @param value the only value of the returned sorted map.
+	 * @return a type-specific immutable sorted map containing just the pair <code>&lt;key,value&gt;</code>.
+	 */
+
+	public static KEY_VALUE_GENERIC SORTED_MAP KEY_VALUE_GENERIC singleton( final KEY_GENERIC_TYPE key, final VALUE_GENERIC_TYPE value ) {
+		return new Singleton KEY_VALUE_GENERIC( key, value );
+	}
+
+	/** Returns a type-specific immutable sorted map containing only the specified pair, using a specified comparator. The returned sorted map is serializable and cloneable.
+	 *
+	 * <P>Note that although the returned map is immutable, its default return value may be changed.
+	 *
+	 * @param key the only key of the returned sorted map.
+	 * @param value the only value of the returned sorted map.
+	 * @param comparator the comparator to use in the returned sorted map.
+	 * @return a type-specific immutable sorted map containing just the pair <code>&lt;key,value&gt;</code>.
+	 */
+
+	public static KEY_VALUE_GENERIC SORTED_MAP KEY_VALUE_GENERIC singleton( final KEY_GENERIC_TYPE key, final VALUE_GENERIC_TYPE value, KEY_COMPARATOR KEY_SUPER_GENERIC comparator ) {
+		return new Singleton KEY_VALUE_GENERIC( key, value, comparator );
+	}
+
+#endif
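+
+	/* A usage sketch of the singleton factories above, assuming the int/int specialization
+	 * Int2IntSortedMaps generated from this template:
+	 *
+	 *   Int2IntSortedMap m = Int2IntSortedMaps.singleton( 1, 100 );
+	 *   int first = m.firstIntKey();   // 1
+	 *   m.defaultReturnValue( -1 );    // permitted: only the default return value is mutable
+	 *   int missing = m.get( 42 );     // -1
+	 *   // m.put( 2, 3 ) would throw UnsupportedOperationException
+	 */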
+
+
+	/** A synchronized wrapper class for sorted maps. */
+
+	public static class SynchronizedSortedMap KEY_VALUE_GENERIC extends MAPS.SynchronizedMap KEY_VALUE_GENERIC implements SORTED_MAP KEY_VALUE_GENERIC, java.io.Serializable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected final SORTED_MAP KEY_VALUE_GENERIC sortedMap;
+
+		protected SynchronizedSortedMap( final SORTED_MAP KEY_VALUE_GENERIC m, final Object sync ) {
+			super( m, sync );
+			sortedMap = m;
+		}
+
+		protected SynchronizedSortedMap( final SORTED_MAP KEY_VALUE_GENERIC m ) {
+			super( m );
+			sortedMap = m;
+		}
+
+		public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { synchronized( sync ) { return sortedMap.comparator(); } }
+
+		public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> ENTRYSET() { if ( entries == null ) entries = ObjectSortedSets.synchronize( sortedMap.ENTRYSET(), sync ); return (ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC>)entries; }
+		@SuppressWarnings({ "rawtypes", "unchecked" })
+		public ObjectSortedSet<Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>> entrySet() { return (ObjectSortedSet)ENTRYSET(); }
+		public SORTED_SET KEY_GENERIC keySet() { if ( keys == null ) keys = SORTED_SETS.synchronize( sortedMap.keySet(), sync ); return (SORTED_SET KEY_GENERIC)keys; }
+
+		public SORTED_MAP KEY_VALUE_GENERIC subMap( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) { return new SynchronizedSortedMap KEY_VALUE_GENERIC( sortedMap.subMap( from, to ), sync ); }
+		public SORTED_MAP KEY_VALUE_GENERIC headMap( final KEY_GENERIC_TYPE to ) { return new SynchronizedSortedMap KEY_VALUE_GENERIC( sortedMap.headMap( to ), sync ); }
+		public SORTED_MAP KEY_VALUE_GENERIC tailMap( final KEY_GENERIC_TYPE from ) { return new SynchronizedSortedMap KEY_VALUE_GENERIC( sortedMap.tailMap( from ), sync ); }
+
+		public KEY_GENERIC_TYPE FIRST_KEY() {  synchronized( sync ) { return sortedMap.FIRST_KEY(); } }
+		public KEY_GENERIC_TYPE LAST_KEY() {  synchronized( sync ) { return sortedMap.LAST_KEY(); } }
+
+#if #keys(primitive)
+		public KEY_GENERIC_CLASS firstKey() {  synchronized( sync ) { return sortedMap.firstKey(); } }
+		public KEY_GENERIC_CLASS lastKey() {  synchronized( sync ) { return sortedMap.lastKey(); } }
+
+		public SORTED_MAP KEY_VALUE_GENERIC subMap( final KEY_GENERIC_CLASS from, final KEY_GENERIC_CLASS to ) { return new SynchronizedSortedMap KEY_VALUE_GENERIC( sortedMap.subMap( from, to ), sync ); }
+		public SORTED_MAP KEY_VALUE_GENERIC headMap( final KEY_GENERIC_CLASS to ) { return new SynchronizedSortedMap KEY_VALUE_GENERIC( sortedMap.headMap( to ), sync ); }
+		public SORTED_MAP KEY_VALUE_GENERIC tailMap( final KEY_GENERIC_CLASS from ) { return new SynchronizedSortedMap KEY_VALUE_GENERIC( sortedMap.tailMap( from ), sync ); }
+#endif
+
+
+	}
+
+	/** Returns a synchronized type-specific sorted map backed by the given type-specific sorted map.
+	 *
+	 * @param m the sorted map to be wrapped in a synchronized sorted map.
+	 * @return a synchronized view of the specified sorted map.
+	 * @see java.util.Collections#synchronizedSortedMap(SortedMap)
+	 */
+	public static KEY_VALUE_GENERIC SORTED_MAP KEY_VALUE_GENERIC synchronize( final SORTED_MAP KEY_VALUE_GENERIC m ) { return new SynchronizedSortedMap KEY_VALUE_GENERIC( m ); }
+
+	/** Returns a synchronized type-specific sorted map backed by the given type-specific sorted map, using an assigned object to synchronize.
+	 *
+	 * @param m the sorted map to be wrapped in a synchronized sorted map.
+	 * @param sync an object that will be used to synchronize the access to the sorted map.
+	 * @return a synchronized view of the specified sorted map.
+	 * @see java.util.Collections#synchronizedSortedMap(SortedMap)
+	 */
+
+	public static KEY_VALUE_GENERIC SORTED_MAP KEY_VALUE_GENERIC synchronize( final SORTED_MAP KEY_VALUE_GENERIC m, final Object sync ) { return new SynchronizedSortedMap KEY_VALUE_GENERIC( m, sync ); }
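+
+	/* A usage sketch, assuming the int/int specializations Int2IntSortedMaps and Int2IntAVLTreeMap
+	 * generated from these templates:
+	 *
+	 *   Int2IntSortedMap m = Int2IntSortedMaps.synchronize( new Int2IntAVLTreeMap() );
+	 *   m.put( 1, 10 );                 // single operations are synchronized
+	 *   synchronized( m ) {             // iteration still requires manual locking,
+	 *      for( int k : m.keySet() ) {  // as with java.util.Collections.synchronizedSortedMap()
+	 *         // use k
+	 *      }
+	 *   }
+	 */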
+
+
+
+
+	/** An unmodifiable wrapper class for sorted maps. */
+
+	public static class UnmodifiableSortedMap KEY_VALUE_GENERIC extends MAPS.UnmodifiableMap KEY_VALUE_GENERIC implements SORTED_MAP KEY_VALUE_GENERIC, java.io.Serializable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected final SORTED_MAP KEY_VALUE_GENERIC sortedMap;
+
+		protected UnmodifiableSortedMap( final SORTED_MAP KEY_VALUE_GENERIC m ) {
+			super( m );
+			sortedMap = m;
+		}
+
+		public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return sortedMap.comparator(); }
+
+		public ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC> ENTRYSET() { if ( entries == null ) entries = ObjectSortedSets.unmodifiable( sortedMap.ENTRYSET() ); return (ObjectSortedSet<MAP.Entry KEY_VALUE_GENERIC>)entries; }
+		@SuppressWarnings({ "rawtypes", "unchecked" })
+		public ObjectSortedSet<Map.Entry<KEY_GENERIC_CLASS, VALUE_GENERIC_CLASS>> entrySet() { return (ObjectSortedSet)ENTRYSET(); }
+		public SORTED_SET KEY_GENERIC keySet() { if ( keys == null ) keys = SORTED_SETS.unmodifiable( sortedMap.keySet() ); return (SORTED_SET KEY_GENERIC)keys; }
+
+		public SORTED_MAP KEY_VALUE_GENERIC subMap( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) { return new UnmodifiableSortedMap KEY_VALUE_GENERIC( sortedMap.subMap( from, to ) ); }
+		public SORTED_MAP KEY_VALUE_GENERIC headMap( final KEY_GENERIC_TYPE to ) { return new UnmodifiableSortedMap KEY_VALUE_GENERIC( sortedMap.headMap( to ) ); }
+		public SORTED_MAP KEY_VALUE_GENERIC tailMap( final KEY_GENERIC_TYPE from ) { return new UnmodifiableSortedMap KEY_VALUE_GENERIC( sortedMap.tailMap( from ) ); }
+
+		public KEY_GENERIC_TYPE FIRST_KEY() {  return sortedMap.FIRST_KEY(); }
+		public KEY_GENERIC_TYPE LAST_KEY() {  return sortedMap.LAST_KEY(); }
+
+#if #keys(primitive)
+		public KEY_GENERIC_CLASS firstKey() {  return sortedMap.firstKey(); }
+		public KEY_GENERIC_CLASS lastKey() {  return sortedMap.lastKey(); }
+
+		public SORTED_MAP KEY_VALUE_GENERIC subMap( final KEY_GENERIC_CLASS from, final KEY_GENERIC_CLASS to ) { return new UnmodifiableSortedMap KEY_VALUE_GENERIC( sortedMap.subMap( from, to ) ); }
+		public SORTED_MAP KEY_VALUE_GENERIC headMap( final KEY_GENERIC_CLASS to ) { return new UnmodifiableSortedMap KEY_VALUE_GENERIC( sortedMap.headMap( to ) ); }
+		public SORTED_MAP KEY_VALUE_GENERIC tailMap( final KEY_GENERIC_CLASS from ) { return new UnmodifiableSortedMap KEY_VALUE_GENERIC( sortedMap.tailMap( from ) ); }
+#endif
+
+
+	}
+
+	/** Returns an unmodifiable type-specific sorted map backed by the given type-specific sorted map.
+	 *
+	 * @param m the sorted map to be wrapped in an unmodifiable sorted map.
+	 * @return an unmodifiable view of the specified sorted map.
+	 * @see java.util.Collections#unmodifiableSortedMap(SortedMap)
+	 */
+	public static KEY_VALUE_GENERIC SORTED_MAP KEY_VALUE_GENERIC unmodifiable( final SORTED_MAP KEY_VALUE_GENERIC m ) { return new UnmodifiableSortedMap KEY_VALUE_GENERIC( m ); }
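+
+	/* A usage sketch, assuming the int/int specializations Int2IntSortedMaps and Int2IntAVLTreeMap
+	 * generated from these templates:
+	 *
+	 *   Int2IntSortedMap base = new Int2IntAVLTreeMap();
+	 *   base.put( 1, 10 );
+	 *   Int2IntSortedMap view = Int2IntSortedMaps.unmodifiable( base );
+	 *   int v = view.get( 1 );          // 10: reads are delegated to the backing map
+	 *   // view.put( 2, 20 ) would throw UnsupportedOperationException
+	 */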
+
+
+
+#if defined(TEST) && ! #keyclass(Reference)
+
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#else
+		return Integer.toBinaryString( r.nextInt() );
+#endif
+	}
+
+	private static VALUE_TYPE genValue() {
+#if #valueclass(Byte) || #valueclass(Short) || #valueclass(Character)
+		return (VALUE_TYPE)(r.nextInt());
+#elif #values(primitive)
+		return r.NEXT_VALUE();
+#elif !#valueclass(Reference) || #keyclass(Reference)
+		return Integer.toBinaryString( r.nextInt() );
+#else
+		return new java.io.Serializable() {};
+#endif
+	}
+
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition p = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, p ).toString();
+	}
+
+	private static void speedTest( int n, boolean comp ) {
+		System.out.println( "There are presently no speed tests for this class." );
+	}
+
+
+	private static boolean valEquals(Object o1, Object o2) {
+		return o1 == null ? o2 == null : o1.equals(o2);
+	}
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	private static Object[] k, v, nk;
+	private static KEY_TYPE kt[];
+	private static KEY_TYPE nkt[];
+	private static VALUE_TYPE vt[];
+	private static SORTED_MAP topMap;
+
+	protected static void testMaps( SORTED_MAP m, SortedMap t, int n, int level ) {
+		long ms;
+		boolean mThrowsIllegal, tThrowsIllegal, mThrowsNoElement, tThrowsNoElement, mThrowsUnsupp, tThrowsUnsupp;
+		Object rt = null, rm = null;
+
+		if ( level > 1 ) return;
+				
+
+		/* Now we check that both maps agree on first/last keys. */
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.firstKey();
+		}
+		catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.firstKey();
+		}
+		catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): firstKey() divergence at start in java.util.NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+		if ( ! mThrowsNoElement ) ensure( t.firstKey().equals( m.firstKey() ), "Error (" + level + ", " + seed + "): m and t differ at start on their first key (" + m.firstKey() + ", " + t.firstKey() +")" );
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.lastKey();
+		}
+		catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.lastKey();
+		}
+		catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): lastKey() divergence at start in java.util.NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+
+
+		if ( ! mThrowsNoElement ) ensure( t.lastKey().equals( m.lastKey() ), "Error (" + level + ", " + seed + "): m and t differ at start on their last key (" + m.lastKey() + ", " + t.lastKey() +")");
+
+
+		/* Now we check that m and t are equal. */
+		if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t);
+
+		ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" );
+		ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" );
+
+
+
+		/* Now we check that m actually holds that data. */
+		for(java.util.Iterator i=t.entrySet().iterator(); i.hasNext();  ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			ensure( valEquals(e.getValue(), m.get(e.getKey())), "Error (" + level + ", " + seed + "): m and t differ on an entry ("+e+") after insertion (iterating on t)" );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for(java.util.Iterator i=m.entrySet().iterator(); i.hasNext();  ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			ensure( valEquals(e.getValue(), t.get(e.getKey())), "Error (" + level + ", " + seed + "): m and t differ on an entry ("+e+") after insertion (iterating on m)" );
+		}
+
+		/* Now we check that m actually holds the same keys. */
+		for(java.util.Iterator i=t.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( m.containsKey(o), "Error (" + level + ", " + seed + "): m and t differ on a key ("+o+") after insertion (iterating on t)" );
+			ensure( m.keySet().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a key ("+o+", in keySet()) after insertion (iterating on t)" );
+		}
+
+		/* Now we check that m actually holds the same keys, but iterating on m. */
+		for(java.util.Iterator i=m.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( t.containsKey(o), "Error (" + level + ", " + seed + "): m and t differ on a key after insertion (iterating on m)" );
+			ensure( t.keySet().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a key (in keySet()) after insertion (iterating on m)" );
+		}
+
+
+		/* Now we check that m actually holds the same values. */
+		for(java.util.Iterator i=t.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( m.containsValue(o), "Error (" + level + ", " + seed + "): m and t differ on a value after insertion (iterating on t)" );
+			ensure( m.values().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a value (in values()) after insertion (iterating on t)" );
+		}
+
+		/* Now we check that m actually holds the same values, but iterating on m. */
+		for(java.util.Iterator i=m.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( t.containsValue(o), "Error (" + level + ", " + seed + "): m and t differ on a value after insertion (iterating on m)");
+			ensure( t.values().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a value (in values()) after insertion (iterating on m)");
+		}
+
+		/* Now we check that inquiries about random data give the same answer in m and t. For
+		   m we use the polymorphic method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+				
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+
+			try {
+				m.containsKey(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+
+			try {
+				t.containsKey(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): containsKey() divergence in java.util.NoSuchElementException (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): containsKey() divergence in IllegalArgumentException (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			if ( !mThrowsNoElement && !mThrowsIllegal ) {
+				ensure( m.containsKey(KEY2OBJ(T)) == t.containsKey(KEY2OBJ(T)), "Error (" + level + ", " + seed + "): divergence in keys between t and m (polymorphic method)" );
+					 
+#if #keyclass(Object) && ! ( #values(reference) )
+				if ((m.GET_VALUE(T) != VALUE_NULL) != ((t.get(KEY2OBJ(T)) == null ? VALUE_NULL : VALUE_OBJ2TYPE(t.get(KEY2OBJ(T)))) != VALUE_NULL) || 
+					t.get(KEY2OBJ(T)) != null && 
+					! VALUE2OBJ(m.GET_VALUE(T)).equals(t.get(KEY2OBJ(T)))) 
+#else
+					if ((m.get(T) != VALUE_NULL) != ((t.get(KEY2OBJ(T)) == null ? VALUE_NULL : VALUE_OBJ2TYPE(t.get(KEY2OBJ(T)))) != VALUE_NULL) || 
+						t.get(KEY2OBJ(T)) != null && 
+						! m.get(KEY2OBJ(T)).equals(t.get(KEY2OBJ(T)))) 
+#endif
+						{
+							System.out.println("Error (" + level + ", " + seed + "): divergence between t and m (polymorphic method)");
+							System.exit( 1 );
+						}
+			}
+		}
+
+		/* Again, we check that inquiries about random data give the same answer in m and t, but
+		   for m we use the standard method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+
+			try {
+				m.get(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+
+			try {
+				t.get(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): get() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): get() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			if ( !mThrowsNoElement && !mThrowsIllegal ) ensure( valEquals(m.get(KEY2OBJ(T)), t.get(KEY2OBJ(T))), "Error (" + level + ", " + seed + "): divergence between t and m (standard method)" );
+		}
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+			VALUE_TYPE U = genValue();
+
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				rm = m.put(KEY2OBJ(T), VALUE2OBJ(U));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.put(KEY2OBJ(T), VALUE2OBJ(U));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): put() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): put() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): put() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsUnsupp ) ensure( valEquals( rm, rt ), "Error (" + level + ", " + seed + "): divergence in put() between t and m (" + rt + ", " + rm + ")" );
+
+			T = genKey();
+
+			mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				rm = m.remove(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.remove(KEY2OBJ(T));
+			}
+			catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): remove() divergence in java.util.NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): remove() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")" );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): remove() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsUnsupp ) ensure( valEquals( rm, rt ), "Error (" + level + ", " + seed + "): divergence in remove() between t and m (" + rt + ", " + rm + ")" );
+		}
+
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after removal" );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after removal" );
+
+		/* Now we check that m actually holds the same data. */
+		  
+		for(java.util.Iterator i=t.entrySet().iterator(); i.hasNext();  ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			ensure( valEquals(e.getValue(), m.get(e.getKey())), "Error (" + level + ", " + seed + "): m and t differ on an entry ("+e+") after removal (iterating on t)");
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.entrySet().iterator(); i.hasNext();  ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			ensure( valEquals(e.getValue(), t.get(e.getKey())), "Error (" + level + ", " + seed + "): m and t differ on an entry ("+e+") after removal (iterating on m)" );
+		}
+
+		/* Now we check that m actually holds the same keys. */
+		  
+		for(java.util.Iterator i=t.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( m.containsKey(o), "Error (" + level + ", " + seed + "): m and t differ on a key ("+o+") after removal (iterating on t)");
+			ensure( m.keySet().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a key ("+o+", in keySet()) after removal (iterating on t)");
+		}
+
+		/* Now we check that m actually holds the same keys, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.keySet().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( t.containsKey(o), "Error (" + level + ", " + seed + "): m and t differ on a key after removal (iterating on m)");
+			ensure( t.keySet().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a key (in keySet()) after removal (iterating on m)");
+		}
+
+
+		/* Now we check that m actually holds the same values. */
+		  
+		for(java.util.Iterator i=t.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( m.containsValue(o), "Error (" + level + ", " + seed + "): m and t differ on a value after removal (iterating on t)" );
+			ensure( m.values().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a value (in values()) after removal (iterating on t)");
+		}
+
+		/* Now we check that m actually holds the same values, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.values().iterator(); i.hasNext();  ) {
+			Object o = i.next();
+			ensure( t.containsValue(o), "Error (" + level + ", " + seed + "): m and t differ on a value after removal (iterating on m)");
+			ensure( t.values().contains(o), "Error (" + level + ", " + seed + "): m and t differ on a value (in values()) after removal (iterating on m)");
+		}
+
+		/* Now we check that both maps agree on first/last keys. */
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.firstKey();
+		}
+		catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.firstKey();
+		}
+		catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): firstKey() divergence in java.util.NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+		if ( ! mThrowsNoElement ) ensure( t.firstKey().equals( m.firstKey() ), "Error (" + level + ", " + seed + "): m and t differ on their first key (" + m.firstKey() + ", " + t.firstKey() +")" );
+
+		mThrowsNoElement = mThrowsIllegal = tThrowsNoElement = tThrowsIllegal = false;
+		  
+		try {
+			m.lastKey();
+		}
+		catch ( java.util.NoSuchElementException e ) { mThrowsNoElement = true; }
+		try {
+			t.lastKey();
+		}
+		catch ( java.util.NoSuchElementException e ) { tThrowsNoElement = true; }
+		  
+		ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): lastKey() divergence in java.util.NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")" );
+
+		if ( ! mThrowsNoElement ) ensure( t.lastKey().equals( m.lastKey() ), "Error (" + level + ", " + seed + "): m and t differ on their last key (" + m.lastKey() + ", " + t.lastKey() +")");
+
+		/* Now we check cloning. */
+
+		if ( level == 0 ) {
+			ensure( m.equals( ((Singleton)m).clone() ), "Error (" + level + ", " + seed + "): m does not equal m.clone()" );
+			ensure( ((Singleton)m).clone().equals( m ), "Error (" + level + ", " + seed + "): m.clone() does not equal m" );
+			m = (SORTED_MAP)((Singleton)m).clone();
+		}
+
+		int h = m.hashCode();
+
+
+		/* Now we save and read m. */
+
+		SORTED_MAP m2 = null;
+		  
+		try {
+			java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test");
+			java.io.OutputStream os = new java.io.FileOutputStream(ff);
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os);
+				
+			oos.writeObject(m);
+			oos.close();
+				
+			java.io.InputStream is = new java.io.FileInputStream(ff);
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is);
+				
+			m2 = (SORTED_MAP)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch(Exception e) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+
+#if !#valueclass(Reference)
+		ensure( m2.hashCode() == h, "Error (" + level + ", " + seed + "): hashCode() changed after save/read" );
+		  
+		/* Now we check that m2 actually holds that data. */
+		  
+		ensure( m2.equals(t), "Error (" + level + ", " + seed + "): ! m2.equals( t ) after save/read" );
+		ensure( t.equals(m2), "Error (" + level + ", " + seed + "): ! t.equals( m2 ) after save/read" );
+		/* Now we take out of m everything, and check that it is empty. */
+#endif
+	  
+		/* Now we play with iterators. */
+		  
+		{
+			java.util.ListIterator i, j;
+			Object J;
+			i = (java.util.ListIterator)m.entrySet().iterator(); 
+			j = new java.util.LinkedList( t.entrySet() ).listIterator(); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext()" );
+				ensure( i.hasPrevious() == j.hasPrevious(), "Error (" + level + ", " + seed + "): divergence in hasPrevious()" );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					ensure( ((java.util.Map.Entry)i.next()).getKey().equals( J = ((Map.Entry)j.next()).getKey() ), "Error (" + level + ", " + seed + "): divergence in next()" );
+
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					ensure( ((java.util.Map.Entry)i.previous()).getKey().equals( J = ((Map.Entry)j.previous()).getKey() ), "Error (" + level + ", " + seed + "): divergence in previous()" );
+
+				}
+
+				ensure( i.nextIndex() == j.nextIndex(), "Error (" + level + ", " + seed + "): divergence in nextIndex()" );
+				ensure( i.previousIndex() == j.previousIndex(), "Error (" + level + ", " + seed + "): divergence in previousIndex()" );
+
+			}
+		}
+
+		{
+			boolean badPrevious = false;
+			Object previous = null;
+			it.unimi.dsi.fastutil.BidirectionalIterator i;
+			java.util.ListIterator j;
+			Object I, J;
+			KEY_TYPE from = genKey();
+			j = new java.util.LinkedList( t.keySet() ).listIterator(); 
+			while( j.hasNext() ) {
+				Object k = j.next();
+				if ( ((Comparable)k).compareTo( KEY2OBJ( from ) ) > 0 ) {
+					badPrevious = true;
+					j.previous();
+					break;
+				}
+				previous = k;
+			}
+
+			i = (it.unimi.dsi.fastutil.BidirectionalIterator)((SORTED_SET)m.keySet()).iterator( from ); 
+
+			for( int k = 0; k < 2*n; k++ ) {
+				ensure( i.hasNext() == j.hasNext(), "Error (" + level + ", " + seed + "): divergence in hasNext() (iterator with starting point " + from + ")" );
+				ensure( i.hasPrevious() == j.hasPrevious() || badPrevious && ( i.hasPrevious() == ( previous != null ) ), "Error (" + level + ", " + seed + "): divergence in hasPrevious() (iterator with starting point " + from + ")" + badPrevious );
+
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					ensure( ( I = i.next() ).equals( J = j.next() ), "Error (" + level + ", " + seed + "): divergence in next() (" + I + ", " + J + ", iterator with starting point " + from + ")" );
+					//System.err.println("Done next " + I + " " + J + "  " + badPrevious);
+
+					badPrevious = false;
+
+					if ( r.nextFloat() < 0.5 ) {
+					}
+				}
+				else if ( !badPrevious && r.nextFloat() < .2 && i.hasPrevious() ) {
+					ensure( ( I = i.previous() ).equals( J = j.previous() ), "Error (" + level + ", " + seed + "): divergence in previous() (" + I + ", " + J + ", iterator with starting point " + from + ")" );
+
+					if ( r.nextFloat() < 0.5 ) {
+					}
+				}
+			}
+
+		}
+
+		/* Now we check that m actually holds that data. */
+		  
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after iteration" );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after iteration" );
+
+		/* Now we select a pair of keys and create a submap. */
+
+		if ( ! m.isEmpty() ) {
+			java.util.ListIterator i;
+			Object start = m.firstKey(), end = m.firstKey();
+			for( i = (java.util.ListIterator)m.keySet().iterator(); i.hasNext() && r.nextFloat() < .3; start = end = i.next() );
+			for( ; i.hasNext() && r.nextFloat() < .95; end = i.next() );
+				
+			//System.err.println("Checking subMap from " + start + " to " + end + " (level=" + (level+1) + ")..." );
+			testMaps( (SORTED_MAP)m.subMap( (KEY_CLASS)start, (KEY_CLASS)end ), t.subMap( start, end ), n, level + 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after subMap" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after subMap" );
+
+			//System.err.println("Checking headMap to " + end + " (level=" + (level+1) + ")..." );
+			testMaps( (SORTED_MAP)m.headMap( (KEY_CLASS)end ), t.headMap( end ), n, level + 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after headMap" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after headMap" );
+
+			//System.err.println("Checking tailMap from " + start + " (level=" + (level+1) + ")..." );
+			testMaps( (SORTED_MAP)m.tailMap( (KEY_CLASS)start ), t.tailMap( start ), n, level + 1 );
+
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after tailMap" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after tailMap" );
+		}
+		  
+
+	}
+
+
+	private static void test() {
+		int n = 1;
+		k = new Object[n];
+		v = new Object[n];
+		nk = new Object[n];
+		kt = new KEY_TYPE[n];
+		nkt = new KEY_TYPE[n];
+		vt = new VALUE_TYPE[n];
+			   
+
+		for( int i = 0; i < n; i++ ) {
+#if #keyclass(Object)
+			k[i] = kt[i] = genKey();
+			nk[i] = nkt[i] = genKey();
+#else
+			k[i] = new KEY_CLASS( kt[i] = genKey() );
+			nk[i] = new KEY_CLASS( nkt[i] = genKey() );
+#endif
+#if #values(reference)
+			v[i] = vt[i] = genValue();
+#else
+			v[i] = new VALUE_CLASS( vt[i] = genValue() );
+#endif
+		}
+		  
+		SORTED_MAP m = new Singleton( kt[0], vt[0] );
+		topMap = m;
+		SortedMap t1 = new java.util.TreeMap();
+		t1.put( k[0], v[0] );
+		SortedMap t = java.util.Collections.unmodifiableSortedMap( t1 );
+
+		testMaps( m, t, n, 0 );
+
+		System.out.println("Test OK");
+		return;
+	}
+
+
+	public static void main( String args[] ) {
+		if ( args.length > 1 ) r = new java.util.Random( seed = Long.parseLong( args[ 1 ] ) );
+		  
+		try {
+			test();
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+
+#endif
+
+	
+}
diff --git a/drv/SortedSet.drv b/drv/SortedSet.drv
new file mode 100644
index 0000000..f6e9731
--- /dev/null
+++ b/drv/SortedSet.drv
@@ -0,0 +1,151 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.SortedSet;
+import java.util.Collection;
+
+/** A type-specific {@link SortedSet}; provides some additional methods that use polymorphism to avoid (un)boxing.
+ *
+ * <P>Additionally, this interface strengthens {@link #iterator()},
+ * {@link #comparator()} (for primitive types), {@link SortedSet#subSet(Object,Object)}, 
+ * {@link SortedSet#headSet(Object)} and {@link SortedSet#tailSet(Object)}.
+ *
+ * @see SortedSet
+ */
+
+
+public interface SORTED_SET KEY_GENERIC extends SET KEY_GENERIC, SortedSet<KEY_GENERIC_CLASS> {
+
+	/** Returns a type-specific {@link it.unimi.dsi.fastutil.BidirectionalIterator} on the elements in
+	 * this set, starting from a given element of the domain (optional operation).
+	 *
+	 * <P>This method returns a type-specific bidirectional iterator with given
+	 * starting point. The starting point is any element comparable to the
+	 * elements of this set (even if it does not actually belong to the
+	 * set). The next element of the returned iterator is the least element of
+	 * the set that is greater than the starting point (if there are no
+	 * elements greater than the starting point, {@link
+	 * it.unimi.dsi.fastutil.BidirectionalIterator#hasNext() hasNext()} will return
+	 * <code>false</code>). The previous element of the returned iterator is
+	 * the greatest element of the set that is smaller than or equal to the
+	 * starting point (if there are no elements smaller than or equal to the
+	 * starting point, {@link it.unimi.dsi.fastutil.BidirectionalIterator#hasPrevious()
+	 * hasPrevious()} will return <code>false</code>).
+	 *  
+	 * <P>Note that by passing the last element of the set as starting point and
+	 * calling {@link it.unimi.dsi.fastutil.BidirectionalIterator#previous() previous()} repeatedly you can traverse the
+	 * entire set in reverse order.
+	 *
+	 * @param fromElement an element to start from.
+	 * @return a bidirectional iterator on the elements in this set, starting at the given element.
+	 * @throws UnsupportedOperationException if this set does not support iterators with a starting point.
+	 */
+
+	KEY_BIDI_ITERATOR KEY_GENERIC iterator( KEY_GENERIC_TYPE fromElement );
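+
+	/* A worked sketch, assuming the int specializations IntSortedSet, IntAVLTreeSet and
+	 * IntBidirectionalIterator generated from these templates:
+	 *
+	 *   IntSortedSet s = new IntAVLTreeSet( new int[] { 1, 3, 5 } );
+	 *   IntBidirectionalIterator i = s.iterator( 3 );
+	 *   int p = i.previousInt();        // 3, the greatest element <= 3
+	 *   // a fresh s.iterator( 3 ) would return 5 on nextInt(), the least element > 3
+	 *   IntBidirectionalIterator r = s.iterator( s.lastInt() );
+	 *   while( r.hasPrevious() ) r.previousInt();   // visits 5, 3, 1: the whole set in reverse order
+	 */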
+
+	/** Returns a type-specific {@link it.unimi.dsi.fastutil.BidirectionalIterator} on the collection.
+	 *
+	 * <P>The iterator returned by the {@link #iterator()} method and by this
+	 * method are identical; however, using this method you can save a type casting.
+	 *
+	 * Note that this specification strengthens the one given in the corresponding type-specific
+	 * {@link Collection}.
+	 *
+	 * @deprecated As of <code>fastutil</code> 5, replaced by {@link #iterator()}.
+	 */
+	@Deprecated
+	KEY_BIDI_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD();
+
+	/** Returns a type-specific {@link it.unimi.dsi.fastutil.BidirectionalIterator} on the elements in
+	 * this set.
+	 *
+	 * <P>This method returns a parameterised bidirectional iterator. The iterator
+	 * can be moreover safely cast to a type-specific iterator.
+	 *
+	 * Note that this specification strengthens the one given in the corresponding type-specific
+	 * {@link Collection}.
+	 *
+	 * @return a bidirectional iterator on the elements in this set.
+	 */
+	KEY_BIDI_ITERATOR KEY_GENERIC iterator();
+
+	/** Returns a view of the portion of this sorted set whose elements range from <code>fromElement</code>, inclusive, to <code>toElement</code>, exclusive.
+	 *
+	 * <P>Note that this specification strengthens the one given in {@link SortedSet#subSet(Object,Object)}.
+	 *
+	 * @see SortedSet#subSet(Object,Object)
+	 */
+	SORTED_SET KEY_GENERIC subSet( KEY_GENERIC_CLASS fromElement, KEY_GENERIC_CLASS toElement );
+
+	/** Returns a view of the portion of this sorted set whose elements are strictly less than <code>toElement</code>.
+	 *
+	 * <P>Note that this specification strengthens the one given in {@link SortedSet#headSet(Object)}.
+	 *
+	 * @see SortedSet#headSet(Object)
+	 */
+	SORTED_SET KEY_GENERIC headSet( KEY_GENERIC_CLASS toElement );
+
+	/** Returns a view of the portion of this sorted set whose elements are greater than or equal to <code>fromElement</code>.
+	 *
+	 * <P>Note that this specification strengthens the one given in {@link SortedSet#tailSet(Object)}.
+	 *
+	 * @see SortedSet#tailSet(Object)
+	 */
+	SORTED_SET KEY_GENERIC tailSet( KEY_GENERIC_CLASS fromElement );
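+
+	/* A usage sketch of the three range views, assuming the int specializations IntSortedSet and
+	 * IntAVLTreeSet generated from these templates (with primitive keys, the int literals below
+	 * resolve to the primitive variants declared further down; the strengthened return type makes
+	 * the cast-free assignments possible):
+	 *
+	 *   IntSortedSet s = new IntAVLTreeSet( new int[] { 1, 3, 5, 7 } );
+	 *   IntSortedSet head = s.headSet( 5 );    // { 1, 3 }
+	 *   IntSortedSet tail = s.tailSet( 5 );    // { 5, 7 }
+	 *   IntSortedSet sub  = s.subSet( 3, 7 );  // { 3, 5 }
+	 */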
+
+
+#if #keys(primitive)
+
+	/** Returns the comparator associated with this sorted set, or null if it uses its elements' natural ordering.
+	 *
+	 * <P>Note that this specification strengthens the one given in {@link SortedSet#comparator()}.
+	 *
+	 * @see SortedSet#comparator()
+	 */
+
+	KEY_COMPARATOR comparator();
+	
+	/**
+	 * @see SortedSet#subSet(Object,Object)
+	 */
+	SORTED_SET subSet( KEY_TYPE fromElement, KEY_TYPE toElement );
+
+	/**
+	 * @see SortedSet#headSet(Object)
+	 */
+	SORTED_SET headSet( KEY_TYPE toElement );
+
+	/**
+	 * @see SortedSet#tailSet(Object)
+	 */
+	SORTED_SET tailSet( KEY_TYPE fromElement );
+
+	/**
+	 * @see SortedSet#first()
+	 */
+	KEY_TYPE FIRST();
+
+	/**
+	 * @see SortedSet#last()
+	 */
+	KEY_TYPE LAST();
+
+#endif 
+
+}
diff --git a/drv/SortedSets.drv b/drv/SortedSets.drv
new file mode 100644
index 0000000..d2213fe
--- /dev/null
+++ b/drv/SortedSets.drv
@@ -0,0 +1,666 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import java.util.SortedSet;
+import java.util.NoSuchElementException;
+#if #keys(reference)
+import java.util.Comparator;
+#endif
+
+
+/** A class providing static methods and objects that do useful things with type-specific sorted sets.
+ *
+ * @see java.util.Collections
+ */
+
+public class SORTED_SETS {
+
+	private SORTED_SETS() {}
+
+	/** An immutable class representing the empty sorted set and implementing a type-specific set interface.
+	 *
+	 * <P>This class may be useful to implement your own in case you subclass
+	 * a type-specific sorted set.
+	 */
+
+	public static class EmptySet KEY_GENERIC extends SETS.EmptySet KEY_GENERIC implements SORTED_SET KEY_GENERIC, java.io.Serializable, Cloneable {
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected EmptySet() {}
+
+		public boolean remove( KEY_TYPE ok ) { throw new UnsupportedOperationException(); }
+
+		@Deprecated
+		public KEY_BIDI_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() { return iterator(); }
+
+		@SuppressWarnings("unchecked")
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator( KEY_GENERIC_TYPE from ) { return ITERATORS.EMPTY_ITERATOR; }
+
+		@SuppressWarnings("unchecked")
+		public SORTED_SET KEY_GENERIC subSet( KEY_GENERIC_TYPE from, KEY_GENERIC_TYPE to ) { return EMPTY_SET; }
+
+		@SuppressWarnings("unchecked")
+		public SORTED_SET KEY_GENERIC headSet( KEY_GENERIC_TYPE from ) { return EMPTY_SET; }
+
+		@SuppressWarnings("unchecked")
+		public SORTED_SET KEY_GENERIC tailSet( KEY_GENERIC_TYPE to ) { return EMPTY_SET; }
+
+		public KEY_GENERIC_TYPE FIRST() { throw new NoSuchElementException(); }
+		public KEY_GENERIC_TYPE LAST() { throw new NoSuchElementException(); }
+
+		public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return null; }
+
+#if #keys(primitive)
+		public SORTED_SET KEY_GENERIC subSet( KEY_GENERIC_CLASS from, KEY_GENERIC_CLASS to ) { return EMPTY_SET; }
+		public SORTED_SET KEY_GENERIC headSet( KEY_GENERIC_CLASS from ) { return EMPTY_SET; }
+		public SORTED_SET KEY_GENERIC tailSet( KEY_GENERIC_CLASS to ) { return EMPTY_SET; }
+
+		public KEY_GENERIC_CLASS first() { throw new NoSuchElementException(); }
+		public KEY_GENERIC_CLASS last() { throw new NoSuchElementException(); }
+#endif
+
+		public Object clone() { return EMPTY_SET; }
+
+		private Object readResolve() { return EMPTY_SET; }
+	}
+
+
+	/** An empty sorted set (immutable). It is serializable and cloneable.
+	 *
+	 * <P>The class of these objects represents an abstract empty set
+	 * that is a subset of any (sorted) type-specific set.
+	 */
+
+	@SuppressWarnings("rawtypes")
+	public static final EmptySet EMPTY_SET = new EmptySet();
+
+	/** A class representing a singleton sorted set.
+	 *
+	 * <P>This class may be useful to implement your own in case you subclass
+	 * a type-specific sorted set.
+	 */
+
+	public static class Singleton KEY_GENERIC extends SETS.Singleton KEY_GENERIC implements SORTED_SET KEY_GENERIC, java.io.Serializable, Cloneable {
+	
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		final KEY_COMPARATOR KEY_SUPER_GENERIC comparator;
+
+		private Singleton( final KEY_GENERIC_TYPE element, final KEY_COMPARATOR KEY_SUPER_GENERIC comparator ) {
+			super( element );
+			this.comparator = comparator;
+		}
+
+		private Singleton( final KEY_GENERIC_TYPE element ) {
+			this( element, null );
+		}
+
+		@SuppressWarnings("unchecked")
+		final int compare( final KEY_GENERIC_TYPE k1, final KEY_GENERIC_TYPE k2 ) {
+			return comparator == null ? KEY_CMP( k1, k2 ) : comparator.compare( k1, k2 );
+		}
+
+		@Deprecated
+		public KEY_BIDI_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() {
+			return iterator();
+		} 
+
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator( KEY_GENERIC_TYPE from ) { 
+			KEY_BIDI_ITERATOR KEY_GENERIC i = iterator();
+			if ( compare( element, from ) <= 0 ) i.next();
+			return i;
+		}
+
+		public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return comparator; }
+
+		@SuppressWarnings("unchecked")
+		public SORTED_SET KEY_GENERIC subSet( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) { if ( compare( from, element ) <= 0 && compare( element, to ) < 0 ) return this; return EMPTY_SET; }
+
+		@SuppressWarnings("unchecked")
+		public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_TYPE to ) { if ( compare( element, to ) < 0 ) return this; return EMPTY_SET; }
+
+		@SuppressWarnings("unchecked")
+		public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_TYPE from ) { if ( compare( from, element ) <= 0 ) return this; return EMPTY_SET; }
+
+		public KEY_GENERIC_TYPE FIRST() { return element; }
+		public KEY_GENERIC_TYPE LAST() { return element; }
+
+#if #keys(primitive)
+		public KEY_CLASS first() { return KEY2OBJ( element ); }
+		public KEY_CLASS last() { return KEY2OBJ( element ); }
+
+
+		public SORTED_SET KEY_GENERIC subSet( final KEY_CLASS from, final KEY_CLASS to ) { return subSet( KEY_CLASS2TYPE( from ), KEY_CLASS2TYPE( to ) ); }
+		public SORTED_SET KEY_GENERIC headSet( final KEY_CLASS to ) { return headSet( KEY_CLASS2TYPE( to ) ); }
+		public SORTED_SET KEY_GENERIC tailSet( final KEY_CLASS from ) { return tailSet( KEY_CLASS2TYPE( from ) ); }
+#endif
+	}
+
+
+	/** Returns a type-specific immutable sorted set containing only the specified element. The returned sorted set is serializable and cloneable.
+	 *
+	 * @param element the only element of the returned sorted set.
+	 * @return a type-specific immutable sorted set containing just <code>element</code>.
+	 */
+
+	public static KEY_GENERIC SORTED_SET KEY_GENERIC singleton( final KEY_GENERIC_TYPE element ) {
+		return new Singleton KEY_GENERIC( element );
+	}
+
+	/** Returns a type-specific immutable sorted set containing only the specified element, and using a specified comparator. The returned sorted set is serializable and cloneable.
+	 *
+	 * @param element the only element of the returned sorted set.
+	 * @param comparator the comparator to use in the returned sorted set.
+	 * @return a type-specific immutable sorted set containing just <code>element</code>.
+	 */
+
+	public static KEY_GENERIC SORTED_SET KEY_GENERIC singleton( final KEY_GENERIC_TYPE element, final KEY_COMPARATOR KEY_SUPER_GENERIC comparator ) {
+		return new Singleton KEY_GENERIC( element, comparator );
+	}
+
+#if #keys(primitive) 
+
+	/** Returns a type-specific immutable sorted set containing only the specified element. The returned sorted set is serializable and cloneable.
+	 *
+	 * @param element the only element of the returned sorted set.
+	 * @return a type-specific immutable sorted set containing just <code>element</code>.
+	 */
+
+	public static KEY_GENERIC SORTED_SET KEY_GENERIC singleton( final Object element ) {
+		return new Singleton( KEY_OBJ2TYPE( element ) );
+	}
+
+	/** Returns a type-specific immutable sorted set containing only the specified element, and using a specified comparator. The returned sorted set is serializable and cloneable.
+	 *
+	 * @param element the only element of the returned sorted set.
+	 * @param comparator the comparator to use in the returned sorted set.
+	 * @return a type-specific immutable sorted set containing just <code>element</code>.
+	 */
+
+	public static KEY_GENERIC SORTED_SET KEY_GENERIC singleton( final Object element, final KEY_COMPARATOR KEY_SUPER_GENERIC comparator ) {
+		return new Singleton( KEY_OBJ2TYPE( element ), comparator );
+	}
+#endif
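+
+	/* A usage sketch of the singleton factories above, assuming the int specializations
+	 * IntSortedSets and IntSortedSet generated from these templates:
+	 *
+	 *   IntSortedSet one = IntSortedSets.singleton( 42 );
+	 *   int first = one.firstInt();    // 42
+	 *   // one.add( 7 ) would throw UnsupportedOperationException
+	 */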
+
+
+	/** A synchronized wrapper class for sorted sets. */
+
+	public static class SynchronizedSortedSet KEY_GENERIC extends SETS.SynchronizedSet KEY_GENERIC implements SORTED_SET KEY_GENERIC, java.io.Serializable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected final SORTED_SET KEY_GENERIC sortedSet;
+
+		protected SynchronizedSortedSet( final SORTED_SET KEY_GENERIC s, final Object sync ) {
+			super( s, sync );
+			sortedSet = s;
+		}
+
+		protected SynchronizedSortedSet( final SORTED_SET KEY_GENERIC s ) {
+			super( s );
+			sortedSet = s;
+		}
+
+		public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { synchronized( sync ) { return sortedSet.comparator(); } }
+
+		public SORTED_SET KEY_GENERIC subSet( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) { return new SynchronizedSortedSet  KEY_GENERIC( sortedSet.subSet( from, to ), sync ); }
+		public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_TYPE to ) { return new SynchronizedSortedSet KEY_GENERIC( sortedSet.headSet( to ), sync ); }
+		public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_TYPE from ) { return new SynchronizedSortedSet KEY_GENERIC( sortedSet.tailSet( from ), sync ); }
+
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return sortedSet.iterator(); }
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return sortedSet.iterator( from ); }
+
+		@Deprecated
+		public KEY_BIDI_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() { return sortedSet.iterator(); }
+
+		public KEY_GENERIC_TYPE FIRST() {  synchronized( sync ) { return sortedSet.FIRST(); } }
+		public KEY_GENERIC_TYPE LAST() {  synchronized( sync ) { return sortedSet.LAST(); } }
+
+#if #keys(primitive)
+		public KEY_CLASS first() {  synchronized( sync ) { return sortedSet.first(); } }
+		public KEY_CLASS last() {  synchronized( sync ) { return sortedSet.last(); } }
+
+		public SORTED_SET KEY_GENERIC subSet( final KEY_CLASS from, final KEY_CLASS to ) { return new SynchronizedSortedSet( sortedSet.subSet( from, to ), sync ); }
+		public SORTED_SET KEY_GENERIC headSet( final KEY_CLASS to ) { return new SynchronizedSortedSet( sortedSet.headSet( to ), sync ); }
+		public SORTED_SET KEY_GENERIC tailSet( final KEY_CLASS from ) { return new SynchronizedSortedSet( sortedSet.tailSet( from ), sync ); }
+#endif
+	}
+
+
+	/** Returns a synchronized type-specific sorted set backed by the given type-specific sorted set.
+	 *
+	 * @param s the sorted set to be wrapped in a synchronized sorted set.
+	 * @return a synchronized view of the specified sorted set.
+	 * @see java.util.Collections#synchronizedSortedSet(SortedSet)
+	 */
+	public static KEY_GENERIC SORTED_SET KEY_GENERIC synchronize( final SORTED_SET KEY_GENERIC s ) {	return new SynchronizedSortedSet KEY_GENERIC( s ); }
+
+	/** Returns a synchronized type-specific sorted set backed by the given type-specific sorted set, using an assigned object to synchronize.
+	 *
+	 * @param s the sorted set to be wrapped in a synchronized sorted set.
+	 * @param sync an object that will be used to synchronize the access to the sorted set.
+	 * @return a synchronized view of the specified sorted set.
+	 * @see java.util.Collections#synchronizedSortedSet(SortedSet)
+	 */
+
+	public static KEY_GENERIC SORTED_SET KEY_GENERIC synchronize( final SORTED_SET KEY_GENERIC s, final Object sync ) { return new SynchronizedSortedSet KEY_GENERIC( s, sync ); }
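+	/* For illustration only: with the int specialization (IntSortedSets), a
+	 * synchronized view would be obtained as follows; as with java.util.Collections,
+	 * iteration must be synchronized manually on the view.
+	 *
+	 *   IntSortedSet s = IntSortedSets.synchronize( new IntAVLTreeSet() );
+	 *   s.add( 1 );
+	 *   synchronized( s ) {
+	 *      IntBidirectionalIterator i = s.iterator();
+	 *      while( i.hasNext() ) i.nextInt();
+	 *   }
+	 */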
+
+
+
+
+
+	/** An unmodifiable wrapper class for sorted sets. */
+
+	public static class UnmodifiableSortedSet KEY_GENERIC extends SETS.UnmodifiableSet KEY_GENERIC implements SORTED_SET KEY_GENERIC, java.io.Serializable {
+
+		private static final long serialVersionUID = -7046029254386353129L;
+
+		protected final SORTED_SET KEY_GENERIC sortedSet;
+
+		protected UnmodifiableSortedSet( final SORTED_SET KEY_GENERIC s ) {
+			super( s );
+			sortedSet = s;
+		}
+
+		public KEY_COMPARATOR KEY_SUPER_GENERIC comparator() { return sortedSet.comparator(); }
+
+		public SORTED_SET KEY_GENERIC subSet( final KEY_GENERIC_TYPE from, final KEY_GENERIC_TYPE to ) { return new UnmodifiableSortedSet KEY_GENERIC( sortedSet.subSet( from, to ) ); }
+		public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_TYPE to ) { return new UnmodifiableSortedSet KEY_GENERIC( sortedSet.headSet( to ) ); }
+		public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_TYPE from ) { return new UnmodifiableSortedSet KEY_GENERIC( sortedSet.tailSet( from ) ); }
+
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator() { return ITERATORS.unmodifiable( sortedSet.iterator() ); }
+		public KEY_BIDI_ITERATOR KEY_GENERIC iterator( final KEY_GENERIC_TYPE from ) { return ITERATORS.unmodifiable( sortedSet.iterator( from ) ); }
+
+		@Deprecated
+		public KEY_BIDI_ITERATOR KEY_GENERIC KEY_ITERATOR_METHOD() { return iterator(); }
+
+		public KEY_GENERIC_TYPE FIRST() {  return sortedSet.FIRST(); }
+		public KEY_GENERIC_TYPE LAST() {  return sortedSet.LAST(); }
+
+#if #keys(primitive)
+		public KEY_CLASS first() {  return sortedSet.first(); }
+		public KEY_CLASS last() {  return sortedSet.last(); }
+
+		public SORTED_SET KEY_GENERIC subSet( final KEY_GENERIC_CLASS from, final KEY_GENERIC_CLASS to ) { return new UnmodifiableSortedSet( sortedSet.subSet( from, to ) ); }
+		public SORTED_SET KEY_GENERIC headSet( final KEY_GENERIC_CLASS to ) { return new UnmodifiableSortedSet( sortedSet.headSet( to ) ); }
+		public SORTED_SET KEY_GENERIC tailSet( final KEY_GENERIC_CLASS from ) { return new UnmodifiableSortedSet( sortedSet.tailSet( from ) ); }
+#endif
+	}
+
+
+	/** Returns an unmodifiable type-specific sorted set backed by the given type-specific sorted set.
+	 *
+	 * @param s the sorted set to be wrapped in an unmodifiable sorted set.
+	 * @return an unmodifiable view of the specified sorted set.
+	 * @see java.util.Collections#unmodifiableSortedSet(SortedSet)
+	 */
+	public static KEY_GENERIC SORTED_SET KEY_GENERIC unmodifiable( final SORTED_SET KEY_GENERIC s ) { return new UnmodifiableSortedSet KEY_GENERIC( s ); }
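+	/* For illustration only: with the int specialization (IntSortedSets), an
+	 * unmodifiable view reflects changes to the backing set but rejects mutation:
+	 *
+	 *   IntSortedSet base = new IntAVLTreeSet();
+	 *   IntSortedSet view = IntSortedSets.unmodifiable( base );
+	 *   base.add( 1 );       // visible through view
+	 *   view.add( 2 );       // throws UnsupportedOperationException
+	 */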
+
+
+
+#if defined(TEST) && ! #keyclass( Reference )
+
+	private static KEY_TYPE genKey() {
+#if #keyclass(Byte) || #keyclass(Short) || #keyclass(Character)
+		return (KEY_TYPE)(r.nextInt());
+#elif #keys(primitive)
+		return r.NEXT_KEY(); 
+#elif #keyclass(Object)
+		return Integer.toBinaryString( r.nextInt() );
+#endif
+	}
+
+
+	protected static void testSets( KEY_TYPE k, SORTED_SET m, SortedSet t, int level ) {
+		int n = 100;
+		int c;
+
+		long ms;
+		boolean mThrowsIllegal, tThrowsIllegal, mThrowsNoElement, tThrowsNoElement, mThrowsIndex, tThrowsIndex, mThrowsUnsupp, tThrowsUnsupp;
+		boolean rt = false, rm = false;
+
+		if ( level == 0 ) return;
+
+		/* Now we check that m and t are equal. */
+		if ( !m.equals( t ) || ! t.equals( m ) ) System.err.println("m: " + m + " t: " + t);
+
+		ensure( m.equals( t ), "Error (" + level + ", " + seed + "): ! m.equals( t ) at start" );
+		ensure( t.equals( m ), "Error (" + level + ", " + seed + "): ! t.equals( m ) at start" );
+
+		/* Now we check that m actually holds that data. */
+		for(java.util.Iterator i=t.iterator(); i.hasNext();  ) {
+			ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on t)" );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for(java.util.Iterator i=m.iterator(); i.hasNext();  ) {
+			ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after insertion (iterating on m)" );
+		}
+
+		/* Now we check that inquiries about random data give the same answer in m and t. For
+		   m we use the polymorphic method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+				
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				m.contains(T);
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+
+			try {
+				t.contains(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): contains() divergence in NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): contains() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): contains() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex) ensure( m.contains(KEY2OBJ(T)) == t.contains(KEY2OBJ(T)), "Error (" + level + ", " + seed + "): divergence in keys between t and m (polymorphic method) " + m );
+		}
+
+		/* Again, we check that inquiries about random data give the same answer in m and t, but
+		   for m we use the standard method. */
+
+		for(int i=0; i<n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				m.contains(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				t.contains(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): contains() divergence in NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): contains() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): contains() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): contains() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( m.contains(KEY2OBJ(T)) ==  t.contains(KEY2OBJ(T)), "Error (" + level + ", " + seed + "): divergence between t and m (standard method) " + m );
+		}
+
+		/* Now we add and remove random data in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				rm = m.add(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.add(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): add() divergence in NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): add() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): add() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): add() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( rm == rt, "Error (" + level + ", " + seed + "): divergence in add() between t and m " + m );
+
+			T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				rm = m.remove(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.remove(KEY2OBJ(T));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			if ( ! KEY_EQUALS( T, k ) && ! mThrowsUnsupp && tThrowsUnsupp ) mThrowsUnsupp = false; // Stupid bug in Collections.singleton()
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): remove() divergence in NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): remove() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): remove() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): remove() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( rm == rt, "Error (" + level + ", " + seed + "): divergence in remove() between t and m " + m );
+		}
+
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after removal " + m );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after removal " + m );
+
+		/* Now we add and remove random collections in m and t, checking that the result is the same. */
+
+		for(int i=0; i<20*n;  i++ ) {
+			KEY_TYPE T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				rm = m.addAll(java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.addAll(java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): addAll() divergence in NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): addAll() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): addAll() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): addAll() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( rm == rt, "Error (" + level + ", " + seed + "): divergence in addAll() between t and m " + m );
+
+			T = genKey();
+
+			mThrowsIndex = tThrowsIndex = mThrowsNoElement = tThrowsNoElement = mThrowsIllegal = tThrowsIllegal = mThrowsUnsupp = tThrowsUnsupp = false;
+
+			try {
+				rm = m.removeAll(java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( NoSuchElementException e ) { mThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { mThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { mThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { mThrowsUnsupp = true; }
+
+			try {
+				rt = t.removeAll(java.util.Collections.singleton(KEY2OBJ(T)));
+			}
+			catch ( NoSuchElementException e ) { tThrowsNoElement = true; }
+			catch ( IllegalArgumentException e ) { tThrowsIllegal = true; }
+			catch ( IndexOutOfBoundsException e ) { tThrowsIndex = true; }
+			catch ( UnsupportedOperationException e ) { tThrowsUnsupp = true; }
+
+
+			ensure( mThrowsNoElement == tThrowsNoElement, "Error (" + level + ", " + seed + "): removeAll() divergence in NoSuchElementException for " + T + " (" + mThrowsNoElement + ", " + tThrowsNoElement + ") " + m );
+			ensure( mThrowsIllegal == tThrowsIllegal, "Error (" + level + ", " + seed + "): removeAll() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ") " + m );
+			ensure( mThrowsIndex == tThrowsIndex, "Error (" + level + ", " + seed + "): removeAll() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsIndex + ", " + tThrowsIndex + ") " + m );
+			ensure( mThrowsUnsupp == tThrowsUnsupp, "Error (" + level + ", " + seed + "): removeAll() divergence in UnsupportedOperationException for " + T + " (" + mThrowsUnsupp + ", " + tThrowsUnsupp + ") " + m );
+			if ( !mThrowsNoElement && !mThrowsIllegal && !mThrowsIndex && !mThrowsUnsupp ) ensure( rm == rt, "Error (" + level + ", " + seed + "): divergence in removeAll() between t and m " + m );
+		}
+
+		ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after set removal " + m );
+		ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after set removal " + m );
+
+		/* Now we check that m actually holds the same data. */
+		  
+		for(java.util.Iterator i=t.iterator(); i.hasNext();  ) {
+			ensure( m.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after removal (iterating on t)");
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+		  
+		for(java.util.Iterator i=m.iterator(); i.hasNext();  ) {
+			ensure( t.contains( i.next() ), "Error (" + level + ", " + seed + "): m and t differ on an entry after removal (iterating on m)" );
+		}
+
+		if ( m instanceof Singleton ) {
+			ensure( m.equals( ((Singleton)m).clone() ), "Error (" + level + ", " + seed + "): m does not equal m.clone()" );
+			ensure( ((Singleton)m).clone().equals( m ), "Error (" + level + ", " + seed + "): m.clone() does not equal m" );
+		}
+
+		int h = m.hashCode();
+
+		/* Now we save and read m. */
+
+		SORTED_SET m2 = null;
+		  
+		try {
+			java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test");
+			java.io.OutputStream os = new java.io.FileOutputStream(ff);
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os);
+				
+			oos.writeObject(m);
+			oos.close();
+				
+			java.io.InputStream is = new java.io.FileInputStream(ff);
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is);
+				
+			m2 = (SORTED_SET)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch(Exception e) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+
+#if ! #keyclass(Reference)
+
+		ensure( m2.hashCode() == h, "Error (" + level + ", " + seed + "): hashCode() changed after save/read" );
+		  
+		/* Now we check that m2 actually holds that data. */
+		  
+		ensure( m2.equals(t), "Error (" + level + ", " + seed + "): ! m2.equals( t ) after save/read" );
+		ensure( t.equals(m2), "Error (" + level + ", " + seed + "): ! t.equals( m2 ) after save/read" );
+#endif
+
+		/* Now we select a pair of keys and create a subset. */
+
+		if ( ! m.isEmpty() ) {
+			java.util.ListIterator i;
+			Object start = m.first(), end = m.first();
+			for( i = (java.util.ListIterator)m.iterator(); i.hasNext() && r.nextBoolean(); start = end = i.next() );
+			for( ; i.hasNext() && r.nextBoolean(); end = i.next() );
+			
+			//System.err.println("Checking subSet from " + start + " to " + end + " (level=" + (level+1) + ")..." );
+			testSets( k, (SORTED_SET)m.subSet( (KEY_CLASS)start, (KEY_CLASS)end ), t.subSet( start, end ), level - 1 );
+			
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after subSet" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after subSet" );
+			
+			//System.err.println("Checking headSet to " + end + " (level=" + (level+1) + ")..." );
+			testSets( k, (SORTED_SET)m.headSet( (KEY_CLASS)end ), t.headSet( end ), level - 1 );
+			
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after headSet" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after headSet" );
+			
+			//System.err.println("Checking tailSet from " + start + " (level=" + (level+1) + ")..." );
+			testSets( k, (SORTED_SET)m.tailSet( (KEY_CLASS)start ), t.tailSet( start ), level - 1 );
+			
+			ensure( m.equals(t), "Error (" + level + ", " + seed + "): ! m.equals( t ) after tailSet" );
+			ensure( t.equals(m), "Error (" + level + ", " + seed + "): ! t.equals( m ) after tailSet" );
+		}
+
+		return;
+	}
+
+	private static void test() {
+		KEY_TYPE k = genKey();
+		Singleton m = new Singleton( k );
+		SortedSet u = new java.util.TreeSet();
+		u.add( KEY2OBJ( k ) );
+		testSets( k, m, java.util.Collections.unmodifiableSortedSet( u ), 2 );
+		System.out.println("Test OK");
+	}
+
+	
+	private static long seed = System.currentTimeMillis(); 
+	private static java.util.Random r = new java.util.Random( seed );
+
+	private static java.text.NumberFormat format = new java.text.DecimalFormat( "#,###.00" );
+	private static java.text.FieldPosition fp = new java.text.FieldPosition( 0 );
+
+	private static String format( double d ) {
+		StringBuffer s = new StringBuffer();
+		return format.format( d, s, fp ).toString();
+	}
+
+	private static void fatal( String msg ) {
+		System.out.println( msg );
+		System.exit( 1 );
+	}
+
+	private static void ensure( boolean cond, String msg ) {
+		if ( cond ) return;
+		fatal( msg );
+	}
+
+	/** This method expects a lower-cased type (e.g., "int") as its first argument,
+	 * and optionally a seed as its second argument. */
+
+	public static void main( String arg[] ) throws Exception {
+		if ( arg.length > 1 ) r = new java.util.Random( seed = Long.parseLong( arg[ 1 ] ) );
+		  
+		try {
+			test();
+		} catch( Throwable e ) {
+			e.printStackTrace( System.err );
+			System.err.println( "seed: " + seed );
+		}
+	}
+	
+#endif
+	
+}
diff --git a/drv/Stack.drv b/drv/Stack.drv
new file mode 100644
index 0000000..97ee32a
--- /dev/null
+++ b/drv/Stack.drv
@@ -0,0 +1,50 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.Stack;
+
+/** A type-specific {@link Stack}; provides some additional methods that use polymorphism to avoid (un)boxing.
+ */
+
+public interface STACK KEY_GENERIC extends Stack<KEY_GENERIC_CLASS> {
+
+	/**
+	 * @see Stack#push(Object)
+	 */
+
+	void push( KEY_TYPE k );
+
+	/**
+	 * @see Stack#pop()
+	 */
+	KEY_TYPE POP();
+
+	/** 
+	 * @see Stack#top()
+	 */
+
+	KEY_TYPE TOP();
+
+	/** 
+	 * @see Stack#peek(int)
+	 */
+
+	KEY_TYPE PEEK( int i );
+
+}
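+/* For illustration only: in the int specialization this interface becomes IntStack,
+   and classes such as IntArrayList implement it, so the unboxed methods can be used
+   directly:
+
+     IntStack s = new IntArrayList();
+     s.push( 1 );
+     s.push( 2 );
+     int top = s.topInt();   // 2
+     int x = s.popInt();     // 2
+*/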
diff --git a/drv/StripedOpenHashMap.drv b/drv/StripedOpenHashMap.drv
new file mode 100644
index 0000000..26a9451
--- /dev/null
+++ b/drv/StripedOpenHashMap.drv
@@ -0,0 +1,168 @@
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+package PACKAGE;
+
+import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
+
+import java.io.Serializable;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
+
+/** A concurrent counting map. The map is made up of a number of <em>stripes</em> (instances of {@link Object2IntOpenHashMap})
+ * which are accessed independently,
+ * each protected by a {@link ReentrantReadWriteLock}. Only one thread at a time can write to a given stripe, but different stripes
+ * can be modified independently, and read access to each stripe can happen concurrently.
+ *  
+ * @param <K> the type of keys.
+ */
+
+public class STRIPED_OPEN_HASH_MAP KEY_VALUE_GENERIC extends ABSTRACT_MAP KEY_VALUE_GENERIC implements java.io.Serializable, Cloneable {
+	private static final long serialVersionUID = 1L;
+	
+	/** The stripes. Keys are distributed among them using the lower bits of their {@link Object#hashCode()}. */
+	private final OPEN_HASH_MAP KEY_VALUE_GENERIC[] map;
+	/** An array of locks parallel to {@link #map}, protecting each stripe. */
+	private final transient ReentrantReadWriteLock[] lock;
+	/** {@link #map map.length} − 1, cached. */
+	private final int mask;
+
+	/** Creates a new concurrent counting map with concurrency level equal to {@link Runtime#availableProcessors()}. */
+	public STRIPED_OPEN_HASH_MAP() {
+		this( Runtime.getRuntime().availableProcessors() );
+	}
+
+	/** Creates a new concurrent counting map.
+	 * 
+	 * @param concurrencyLevel the number of stripes (it will be {@linkplain Integer#highestOneBit(int) forced to be a power of two}); ideally, as large as the number of threads that will ever access
+	 * this map, but higher values require more space.
+	 */
+	@SuppressWarnings("unchecked")
+	public STRIPED_OPEN_HASH_MAP( final int concurrencyLevel ) {
+		map = new OPEN_HASH_MAP[ Integer.highestOneBit( concurrencyLevel ) ];
+		lock = new ReentrantReadWriteLock[ map.length ];
+		for( int i = map.length; i-- != 0; ) {
+			map[ i ] = new OPEN_HASH_MAP KEY_VALUE_GENERIC();
+			lock[ i ] = new ReentrantReadWriteLock();
+		}
+		mask = map.length - 1;
+	}
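+	/* For illustration only: with concurrencyLevel = 6 the constructor above
+	 * allocates Integer.highestOneBit( 6 ) = 4 stripes and sets mask = 3, so a
+	 * key k is routed to stripe KEY2INTHASH( k ) & 3; that stripe's read lock is
+	 * taken for get() and its write lock for put()/putIfAbsent(). */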
+	
+#if #keys(primitive)
+
+	public VALUE_GENERIC_CLASS get( final KEY_CLASS k ) {
+		final int stripe = KEY2INTHASH( k ) & mask;
+		final ReadLock readLock = lock[ stripe ].readLock();
+		try {
+			readLock.lock();
+			return map[ stripe ].get( k );
+		}
+		finally {
+			readLock.unlock();
+		}
+	}
+
+#endif
+
+	@SuppressWarnings("unchecked")
+	public VALUE_GENERIC_TYPE GET_VALUE( final KEY_TYPE k ) {
+		final int stripe = KEY2INTHASH( k ) & mask;
+		final ReadLock readLock = lock[ stripe ].readLock();
+		try {
+			readLock.lock();
+			return map[ stripe ].GET_VALUE( k );
+		}
+		finally {
+			readLock.unlock();
+		}
+	}
+
+	public VALUE_GENERIC_TYPE put( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) {
+		final int stripe = KEY2INTHASH( k ) & mask;
+		final WriteLock writeLock = lock[ stripe ].writeLock();
+		try {
+			writeLock.lock();
+			return map[ stripe ].put( k, v );
+		}
+		finally {
+			writeLock.unlock();
+		}
+	}
+
+	public VALUE_GENERIC_TYPE putIfAbsent( final KEY_GENERIC_TYPE k, final VALUE_GENERIC_TYPE v ) {
+		final int stripe = KEY2INTHASH( k ) & mask;
+		final WriteLock writeLock = lock[ stripe ].writeLock();
+		try {
+			writeLock.lock();
+			if ( map[ stripe ].containsKey( k ) ) return map[ stripe ].get( k );
+			return map[ stripe ].put( k, v );
+		}
+		finally {
+			writeLock.unlock();
+		}
+	}
+
+#if #values(primitive) || #keys(primitive)
+
+	public VALUE_GENERIC_CLASS put( final KEY_GENERIC_CLASS ok, final VALUE_GENERIC_CLASS ov ) {
+		final int stripe = KEY2INTHASH( ok ) & mask;
+		final WriteLock writeLock = lock[ stripe ].writeLock();
+		try {
+			writeLock.lock();
+			return map[ stripe ].put( ok, ov );
+		}
+		finally {
+			writeLock.unlock();
+		}
+	}
+
+
+	public VALUE_GENERIC_CLASS putIfAbsent( final KEY_GENERIC_CLASS ok, final VALUE_GENERIC_CLASS ov ) {
+		final int stripe = KEY2INTHASH( ok ) & mask;
+		final WriteLock writeLock = lock[ stripe ].writeLock();
+		try {
+			writeLock.lock();
+			if ( map[ stripe ].containsKey( ok ) ) return map[ stripe ].get( ok );
+			return map[ stripe ].put( ok, ov );
+		}
+		finally {
+			writeLock.unlock();
+		}
+	}
+
+#endif
+
+	public int size() {
+		int size = 0;
+		for( int stripe = lock.length; stripe-- != 0; ) {	
+			final ReadLock readLock = lock[ stripe ].readLock();
+			try {
+				readLock.lock();
+				size += map[ stripe ].size();
+			}
+			finally {
+				readLock.unlock();
+			}
+		}
+
+		return size;
+	}
+
+	public FastEntrySet KEY_VALUE_GENERIC ENTRYSET() {
+		throw new UnsupportedOperationException();
+	}
+}
\ No newline at end of file
diff --git a/drv/TextIO.drv b/drv/TextIO.drv
new file mode 100644
index 0000000..d4281eb
--- /dev/null
+++ b/drv/TextIO.drv
@@ -0,0 +1,74 @@
+/*		 
+ * Copyright (C) 2005-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+package it.unimi.dsi.fastutil.io;
+
+import static it.unimi.dsi.fastutil.BigArrays.SEGMENT_MASK;
+import static it.unimi.dsi.fastutil.BigArrays.segment;
+import static it.unimi.dsi.fastutil.BigArrays.start;
+import java.io.*;
+import java.util.*;
+import it.unimi.dsi.fastutil.booleans.*;
+import it.unimi.dsi.fastutil.bytes.*;
+import it.unimi.dsi.fastutil.shorts.*;
+import it.unimi.dsi.fastutil.ints.*;
+import it.unimi.dsi.fastutil.longs.*;
+import it.unimi.dsi.fastutil.floats.*;
+import it.unimi.dsi.fastutil.doubles.*;
+
+/** Provides static methods to perform textual I/O easily.
+ *
+ * <P>This class fills a gap in the Java API: a natural operation on sequences
+ * of primitive elements is to load or store them in textual form, a format
+ * that makes files human-readable.
+ * 
+ * <P>For each primitive type, this class provides methods that read elements
+ * from a {@link BufferedReader} or from a filename (which will be opened
+ * using a buffer of {@link #BUFFER_SIZE} bytes) into an array. Analogously,
+ * there are methods that store the content of an array (fragment) or the
+ * elements returned by an iterator to a {@link PrintStream} or to a given
+ * filename.
+ *
+ * <P>Finally, there are useful wrapper methods that {@linkplain #asIntIterator(CharSequence)
+ * exhibit a file as a type-specific iterator}.
+ *
+ * <P>Note that, contrary to the binary case, there is no way to
+ * {@linkplain BinIO#loadInts(CharSequence) load from a file without providing an array}. You can
+ * easily work around the problem as follows:
+ * <pre>
+ * array = IntIterators.unwrap( TextIO.asIntIterator("foo") );
+ * </pre>
+ *
+ * @since 4.4
+ */
+
+public class TextIO {
+
+	private TextIO() {}
+
+	/** The size of the buffer used for all I/O on files. */
+	final public static int BUFFER_SIZE = 8 * 1024;
+
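+	/* For illustration only: a typical round trip using the int methods generated
+	 * by the fragments included below would look like
+	 *
+	 *   int[] a = { 1, 2, 3 };
+	 *   TextIO.storeInts( a, "numbers.txt" );      // one element per line
+	 *   int[] b = new int[ 3 ];
+	 *   TextIO.loadInts( "numbers.txt", b );       // returns the number of elements read
+	 *
+	 * or, without knowing the length in advance,
+	 *
+	 *   int[] c = IntIterators.unwrap( TextIO.asIntIterator( "numbers.txt" ) );
+	 */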
+#include "src/it/unimi/dsi/fastutil/io/BooleanTextIOFragment.h"
+#include "src/it/unimi/dsi/fastutil/io/ByteTextIOFragment.h"
+#include "src/it/unimi/dsi/fastutil/io/ShortTextIOFragment.h"
+#include "src/it/unimi/dsi/fastutil/io/IntTextIOFragment.h"
+#include "src/it/unimi/dsi/fastutil/io/LongTextIOFragment.h"
+#include "src/it/unimi/dsi/fastutil/io/FloatTextIOFragment.h"
+#include "src/it/unimi/dsi/fastutil/io/DoubleTextIOFragment.h"
+
+}
diff --git a/drv/TextIOFragment.drv b/drv/TextIOFragment.drv
new file mode 100644
index 0000000..178a815
--- /dev/null
+++ b/drv/TextIOFragment.drv
@@ -0,0 +1,406 @@
+/*		 
+ * Copyright (C) 2004-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+
+/** Loads elements from a given buffered reader, storing them in a given array fragment.
+ *
+ * @param reader a buffered reader.
+ * @param array an array which will be filled with data from <code>reader</code>.
+ * @param offset the index of the first element of <code>array</code> to be filled.
+ * @param length the number of elements of <code>array</code> to be filled.
+ * @return the number of elements actually read from <code>reader</code> (it might be less than <code>length</code> if <code>reader</code> ends).
+ */
+public static int LOAD_KEYS( final BufferedReader reader, final KEY_TYPE[] array, final int offset, final int length ) throws IOException {
+	PACKAGE.ARRAYS.ensureOffsetLength( array, offset, length );
+	int i = 0;
+	String s;
+	try {
+		for( i = 0; i < length; i++ ) 
+			if ( ( s = reader.readLine() ) != null ) array[ i + offset ] = KEY_CLASS.PARSE_KEY( s );
+			else break;
+	}
+	catch( EOFException itsOk ) {}
+	return i;
+}
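+/* For illustration only: in the int specialization the method above becomes
+   TextIO.loadInts( BufferedReader, int[], int, int ); e.g., given a BufferedReader reader,
+
+     int[] a = new int[ 100 ];
+     int read = TextIO.loadInts( reader, a, 10, 50 ); // fills a[ 10 .. 10 + read - 1 ]
+*/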
+
+/** Loads elements from a given buffered reader, storing them in a given array.
+ *
+ * @param reader a buffered reader.
+ * @param array an array which will be filled with data from <code>reader</code>.
+ * @return the number of elements actually read from <code>reader</code> (it might be less than the array length if <code>reader</code> ends).
+ */
+public static int LOAD_KEYS( final BufferedReader reader, final KEY_TYPE[] array ) throws IOException {
+	return LOAD_KEYS( reader, array, 0, array.length );
+}
+
+/** Loads elements from a file given by a {@link File} object, storing them in a given array fragment.
+ *
+ * @param file a file.
+ * @param array an array which will be filled with data from the specified file.
+ * @param offset the index of the first element of <code>array</code> to be filled.
+ * @param length the number of elements of <code>array</code> to be filled.
+ * @return the number of elements actually read from the given file (it might be less than <code>length</code> if the file is too short).
+ */
+public static int LOAD_KEYS( final File file, final KEY_TYPE[] array, final int offset, final int length ) throws IOException {
+	final BufferedReader reader = new BufferedReader( new FileReader( file ) );
+	final int result = LOAD_KEYS( reader, array, offset, length );
+	reader.close();
+
+	return result;
+}
+
+/** Loads elements from a file given by a filename, storing them in a given array fragment.
+ *
+ * @param filename a filename.
+ * @param array an array which will be filled with data from the specified file.
+ * @param offset the index of the first element of <code>array</code> to be filled.
+ * @param length the number of elements of <code>array</code> to be filled.
+ * @return the number of elements actually read from the given file (it might be less than <code>length</code> if the file is too short).
+ */
+public static int LOAD_KEYS( final CharSequence filename, final KEY_TYPE[] array, final int offset, final int length ) throws IOException {
+	return LOAD_KEYS( new File( filename.toString() ), array, offset, length );
+}
+
+/** Loads elements from a file given by a {@link File} object, storing them in a given array.
+ *
+ * @param file a file.
+ * @param array an array which will be filled with data from the specified file.
+ * @return the number of elements actually read from the given file (it might be less than the array length if the file is too short).
+ */
+public static int LOAD_KEYS( final File file, final KEY_TYPE[] array ) throws IOException {
+	return LOAD_KEYS( file, array, 0, array.length );
+}
+
+/** Loads elements from a file given by a filename, storing them in a given array.
+ *
+ * @param filename a filename.
+ * @param array an array which will be filled with data from the specified file.
+ * @return the number of elements actually read from the given file (it might be less than the array length if the file is too short).
+ */
+public static int LOAD_KEYS( final CharSequence filename, final KEY_TYPE[] array ) throws IOException {
+	return LOAD_KEYS( filename, array, 0, array.length );
+}
+
+/** Stores an array fragment to a given print stream.
+ *
+ * @param array an array whose elements will be written to <code>stream</code>.
+ * @param offset the index of the first element of <code>array</code> to be written.
+ * @param length the number of elements of <code>array</code> to be written.
+ * @param stream a print stream.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[], final int offset, final int length, final PrintStream stream ) {
+	PACKAGE.ARRAYS.ensureOffsetLength( array, offset, length );
+	for( int i = 0; i < length; i++ ) stream.println( array[ offset + i ] );
+}
+
+/** Stores an array to a given print stream.
+ *
+ * @param array an array whose elements will be written to <code>stream</code>.
+ * @param stream a print stream.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[], final PrintStream stream ) {
+	STORE_KEYS( array, 0, array.length, stream );
+}
+
+/** Stores an array fragment to a file given by a {@link File} object.
+ *
+ * @param array an array whose elements will be written to <code>file</code>.
+ * @param offset the index of the first element of <code>array</code> to be written.
+ * @param length the number of elements of <code>array</code> to be written.
+ * @param file a file.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[], final int offset, final int length, final File file ) throws IOException {
+	final PrintStream stream = new PrintStream( new FastBufferedOutputStream( new FileOutputStream( file ) ) );
+	STORE_KEYS( array, offset, length, stream );
+	stream.close();
+}
+
+/** Stores an array fragment to a file given by a pathname.
+ *
+ * @param array an array whose elements will be written to <code>filename</code>.
+ * @param offset the index of the first element of <code>array</code> to be written.
+ * @param length the number of elements of <code>array</code> to be written.
+ * @param filename a filename.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[], final int offset, final int length, final CharSequence filename ) throws IOException {
+	STORE_KEYS( array, offset, length, new File( filename.toString() ) );
+}
+
+/** Stores an array to a file given by a {@link File} object.
+ *
+ * @param array an array whose elements will be written to <code>file</code>.
+ * @param file a file.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[], final File file ) throws IOException {
+	STORE_KEYS( array, 0, array.length, file );
+}
+
+/** Stores an array to a file given by a pathname.
+ *
+ * @param array an array whose elements will be written to <code>filename</code>.
+ * @param filename a filename.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[], final CharSequence filename ) throws IOException {
+	STORE_KEYS( array, 0, array.length, filename );
+}
+
+/** Stores the elements returned by an iterator to a given print stream.
+ *
+ * @param i an iterator whose output will be written to <code>stream</code>.
+ * @param stream a print stream.
+ */
+public static void STORE_KEYS( final KEY_ITERATOR i, final PrintStream stream ) {
+	while( i.hasNext() ) stream.println( i.NEXT_KEY() );
+}
+
+/** Stores the elements returned by an iterator to a file given by a {@link File} object.
+ *
+ * @param i an iterator whose output will be written to <code>file</code>.
+ * @param file a file.
+ */
+public static void STORE_KEYS( final KEY_ITERATOR i, final File file ) throws IOException {
+	final PrintStream stream = new PrintStream( new FastBufferedOutputStream( new FileOutputStream( file ) ) );
+	STORE_KEYS( i, stream );
+	stream.close();
+}
+
+/** Stores the elements returned by an iterator to a file given by a pathname.
+ *
+ * @param i an iterator whose output will be written to <code>filename</code>.
+ * @param filename a filename.
+ */
+public static void STORE_KEYS( final KEY_ITERATOR i, final CharSequence filename ) throws IOException {
+	STORE_KEYS( i, new File( filename.toString() ) );
+}
+
+
+/** Loads elements from a given buffered reader, storing them in a given big-array fragment.
+ *
+ * @param reader a buffered reader.
+ * @param array a big array which will be filled with data from <code>reader</code>.
+ * @param offset the index of the first element of <code>array</code> to be filled.
+ * @param length the number of elements of <code>array</code> to be filled.
+ * @return the number of elements actually read from <code>reader</code> (it might be less than <code>length</code> if <code>reader</code> ends).
+ */
+public static long LOAD_KEYS( final BufferedReader reader, final KEY_TYPE[][] array, final long offset, final long length ) throws IOException {
+	PACKAGE.BIG_ARRAYS.ensureOffsetLength( array, offset, length );
+	long c = 0;
+	String s;
+	try {
+		for( int i = segment( offset ); i < segment( offset + length + SEGMENT_MASK ); i++ ) {
+			final KEY_TYPE[] t = array[ i ];
+			final int l = (int)Math.min( t.length, offset + length - start( i ) );
+			for( int d = (int)Math.max( 0, offset - start( i ) ); d < l; d++ ) {
+				if ( ( s = reader.readLine() ) != null ) t[ d ] = KEY_CLASS.PARSE_KEY( s );
+				else return c;
+				c++;
+			}
+		}
+	}
+	catch( EOFException itsOk ) {}
+	return c;
+}
+
+/** Loads elements from a given buffered reader, storing them in a given array.
+ *
+ * @param reader a buffered reader.
+ * @param array a big array which will be filled with data from <code>reader</code>.
+ * @return the number of elements actually read from <code>reader</code> (it might be less than the array length if <code>reader</code> ends).
+ */
+public static long LOAD_KEYS( final BufferedReader reader, final KEY_TYPE[][] array ) throws IOException {
+	return LOAD_KEYS( reader, array, 0, PACKAGE.BIG_ARRAYS.length( array ) );
+}
+
+/** Loads elements from a file given by a {@link File} object, storing them in a given big-array fragment.
+ *
+ * @param file a file.
+ * @param array a big array which will be filled with data from the specified file.
+ * @param offset the index of the first element of <code>array</code> to be filled.
+ * @param length the number of elements of <code>array</code> to be filled.
+ * @return the number of elements actually read from the given file (it might be less than <code>length</code> if the file is too short).
+ */
+public static long LOAD_KEYS( final File file, final KEY_TYPE[][] array, final long offset, final long length ) throws IOException {
+	final BufferedReader reader = new BufferedReader( new FileReader( file ) );
+	final long result = LOAD_KEYS( reader, array, offset, length );
+	reader.close();
+
+	return result;
+}
+
+/** Loads elements from a file given by a filename, storing them in a given big-array fragment.
+ *
+ * @param filename a filename.
+ * @param array a big array which will be filled with data from the specified file.
+ * @param offset the index of the first element of <code>array</code> to be filled.
+ * @param length the number of elements of <code>array</code> to be filled.
+ * @return the number of elements actually read from the given file (it might be less than <code>length</code> if the file is too short).
+ */
+public static long LOAD_KEYS( final CharSequence filename, final KEY_TYPE[][] array, final long offset, final long length ) throws IOException {
+	return LOAD_KEYS( new File( filename.toString() ), array, offset, length );
+}
+
+/** Loads elements from a file given by a {@link File} object, storing them in a given array.
+ *
+ * @param file a file.
+ * @param array a big array which will be filled with data from the specified file.
+ * @return the number of elements actually read from the given file (it might be less than the array length if the file is too short).
+ */
+public static long LOAD_KEYS( final File file, final KEY_TYPE[][] array ) throws IOException {
+	return LOAD_KEYS( file, array, 0, PACKAGE.BIG_ARRAYS.length( array ) );
+}
+
+/** Loads elements from a file given by a filename, storing them in a given array.
+ *
+ * @param filename a filename.
+ * @param array a big array which will be filled with data from the specified file.
+ * @return the number of elements actually read from the given file (it might be less than the array length if the file is too short).
+ */
+public static long LOAD_KEYS( final CharSequence filename, final KEY_TYPE[][] array ) throws IOException {
+	return LOAD_KEYS( filename, array, 0, PACKAGE.BIG_ARRAYS.length( array ) );
+}
+
+/** Stores a big-array fragment to a given print stream.
+ *
+ * @param array a big array whose elements will be written to <code>stream</code>.
+ * @param offset the index of the first element of <code>array</code> to be written.
+ * @param length the number of elements of <code>array</code> to be written.
+ * @param stream a print stream.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[][], final long offset, final long length, final PrintStream stream ) {
+	PACKAGE.BIG_ARRAYS.ensureOffsetLength( array, offset, length );
+
+	for( int i = segment( offset ); i < segment( offset + length + SEGMENT_MASK ); i++ ) {
+		final KEY_TYPE[] t = array[ i ];
+		final int l = (int)Math.min( t.length, offset + length - start( i ) );
+		for( int d = (int)Math.max( 0, offset - start( i ) ); d < l; d++ ) stream.println( t[ d ] );
+	}
+}
+
+/** Stores a big array to a given print stream.
+ *
+ * @param array a big array whose elements will be written to <code>stream</code>.
+ * @param stream a print stream.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[][], final PrintStream stream ) {
+	STORE_KEYS( array, 0, PACKAGE.BIG_ARRAYS.length( array ), stream );
+}
+
+/** Stores a big-array fragment to a file given by a {@link File} object.
+ *
+ * @param array a big array whose elements will be written to <code>file</code>.
+ * @param offset the index of the first element of <code>array</code> to be written.
+ * @param length the number of elements of <code>array</code> to be written.
+ * @param file a file.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[][], final long offset, final long length, final File file ) throws IOException {
+	final PrintStream stream = new PrintStream( new FastBufferedOutputStream( new FileOutputStream( file ) ) );
+	STORE_KEYS( array, offset, length, stream );
+	stream.close();
+}
+
+/** Stores a big-array fragment to a file given by a pathname.
+ *
+ * @param array a big array whose elements will be written to <code>filename</code>.
+ * @param offset the index of the first element of <code>array</code> to be written.
+ * @param length the number of elements of <code>array</code> to be written.
+ * @param filename a filename.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[][], final long offset, final long length, final CharSequence filename ) throws IOException {
+	STORE_KEYS( array, offset, length, new File( filename.toString() ) );
+}
+
+/** Stores a big array to a file given by a {@link File} object.
+ *
+ * @param array a big array whose elements will be written to <code>file</code>.
+ * @param file a file.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[][], final File file ) throws IOException {
+	STORE_KEYS( array, 0, PACKAGE.BIG_ARRAYS.length( array ), file );
+}
+
+/** Stores a big array to a file given by a pathname.
+ *
+ * @param array a big array whose elements will be written to <code>filename</code>.
+ * @param filename a filename.
+ */
+public static void STORE_KEYS( final KEY_TYPE array[][], final CharSequence filename ) throws IOException {
+	STORE_KEYS( array, 0, PACKAGE.BIG_ARRAYS.length( array ), filename );
+}
+
+
+/** A wrapper that exhibits the content of a reader as a type-specific iterator. */
+
+final private static class KEY_READER_WRAPPER extends KEY_ABSTRACT_ITERATOR {
+	final private BufferedReader reader;
+	private boolean toAdvance = true;
+	private String s;
+	private KEY_TYPE next;
+
+	public KEY_READER_WRAPPER( final BufferedReader reader ) {
+		this.reader = reader;
+	}
+
+	public boolean hasNext() {
+		if ( ! toAdvance ) return s != null;
+
+		toAdvance = false;
+
+		try {
+			s = reader.readLine();
+		}
+		catch( EOFException itsOk ) {}
+		catch( IOException rethrow ) { throw new RuntimeException( rethrow ); }
+
+		if ( s == null ) return false;
+
+		next = KEY_CLASS.PARSE_KEY( s );
+		return true;
+	}
+
+	public KEY_TYPE NEXT_KEY() {
+		if (! hasNext()) throw new NoSuchElementException();
+		toAdvance = true;
+		return next;
+	}
+}
+
+
+
+/** Wraps the given buffered reader into an iterator.
+ *
+ * @param reader a buffered reader.
+ */
+public static KEY_ITERATOR AS_KEY_ITERATOR( final BufferedReader reader ) {
+	return new KEY_READER_WRAPPER( reader );
+}
+
+/** Wraps a file given by a {@link File} object into an iterator.
+ *
+ * @param file a file.
+ */
+public static KEY_ITERATOR AS_KEY_ITERATOR( final File file ) throws IOException {
+	return new KEY_READER_WRAPPER( new BufferedReader( new FileReader( file ) ) );
+}
+
+/** Wraps a file given by a pathname into an iterator.
+ *
+ * @param filename a filename.
+ */
+public static KEY_ITERATOR AS_KEY_ITERATOR( final CharSequence filename ) throws IOException {
+	return AS_KEY_ITERATOR( new File( filename.toString() ) );
+}
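+/* For illustration only: in the int specialization the wrappers above allow a file
+   of numbers to be streamed without materializing it:
+
+     IntIterator i = TextIO.asIntIterator( "numbers.txt" );
+     long sum = 0;
+     while( i.hasNext() ) sum += i.nextInt();
+*/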
+
diff --git a/gencsource.sh b/gencsource.sh
new file mode 100755
index 0000000..1cd98de
--- /dev/null
+++ b/gencsource.sh
@@ -0,0 +1,614 @@
+#!/bin/bash
+
+#
+# This script generates, from driver files, fake C sources that are then passed
+# through a C preprocessor to obtain the actual Java sources. It expects
+# as arguments the name of the driver and the name of the file to be
+# generated.
+#
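+# For illustration only (the actual invocation is driven by the build files),
+# the int ArrayList sources could be produced along these lines:
+#
+#   ./gencsource.sh drv/ArrayList.drv IntArrayList.java > IntArrayList.c
+#   gcc -E -C -P IntArrayList.c > IntArrayList.java
+#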
+
+# The types we specialise to (these are actual Java types, so references appear here as Object).
+TYPE=(boolean byte short int long char float double Object Object)
+
+# The capitalized types used to build class and method names (now references appear as Reference).
+TYPE_CAP=(Boolean Byte Short Int Long Char Float Double Object Reference)
+
+# Much like $TYPE_CAP, but the place occupied by Reference is now occupied by Object.
+TYPE_CAP2=(Boolean Byte Short Int Long Char Float Double Object Object)
+
+# Much like $TYPE_CAP, but object types get the empty string.
+TYPE_STD=(Boolean Byte Short Int Long Char Float Double "" "")
+
+# The upper case types used to build class and method names.
+TYPE_UC=(BOOLEAN BYTE SHORT INT LONG CHAR FLOAT DOUBLE OBJECT REFERENCE)
+
+# The downcased types used to build method names.
+TYPE_LC=(boolean byte short int long char float double object reference)
+
+# Much like $TYPE_LC, but the place occupied by reference is now occupied by object.
+TYPE_LC2=(boolean byte short int long char float double object object)
+
+# The corresponding classes (in a few cases, they differ from $TYPE_CAP).
+CLASS=(Boolean Byte Short Integer Long Character Float Double Object Reference)
+
+export LC_ALL=C
+shopt -s extglob
+
+file=${2##*/}
+name=${file%.*}
+class=${name#Abstract}
+if [[ "$class" == "$name" ]]; then
+    abstract=
+else
+    abstract=Abstract
+fi
+class=${class#Striped}
+
+# Now we rip off the types.
+rem=${class##[A-Z]+([a-z])}
+keylen=$(( ${#class} - ${#rem} ))
+root=$rem
+
+KEY_TYPE_CAP=${class:0:$keylen}
+VALUE_TYPE_CAP=Object # Just for filling holes
+
+if [[ "${rem:0:1}" == "2" ]]; then
+    isFunction=true
+    rem=${rem:1}
+    rem2=${rem##[A-Z]+([a-z])}
+    valuelen=$(( ${#rem} - ${#rem2} ))
+    VALUE_TYPE_CAP=${rem:0:$valuelen}
+    root=$rem2
+else
+    isFunction=false
+fi
+
+for((k=0; k<${#TYPE_CAP[*]}; k++)); do
+    if [[ ${TYPE_CAP[$k]} == $KEY_TYPE_CAP ]]; then break; fi;
+done
+
+for((v=0; v<${#TYPE_CAP[*]}; v++)); do
+    if [[ ${TYPE_CAP[$v]} == $VALUE_TYPE_CAP ]]; then break; fi;
+done
+
+if [[ $root == *Linked* ]]; then 
+Linked=Linked
+echo -e \
+"#define SET_PREV( f64, p32 )       SET_UPPER( f64, p32 )\n"\
+"#define SET_NEXT( f64, n32 )       SET_LOWER( f64, n32 )\n"\
+"#define COPY_PREV( f64, p64 )      SET_UPPER64( f64, p64 )\n"\
+"#define COPY_NEXT( f64, n64 )      SET_LOWER64( f64, n64 )\n"\
+"#define GET_PREV( f64 )            GET_UPPER( f64 )\n"\
+"#define GET_NEXT( f64 )            GET_LOWER( f64 )\n"\
+"#define SET_UPPER_LOWER( f64, up32, low32 )    f64 = ( ( up32 & 0xFFFFFFFFL ) << 32 ) | ( low32 & 0xFFFFFFFFL )\n"\
+"#define SET_UPPER( f64, up32 )     f64 ^= ( ( f64 ^ ( ( up32 & 0xFFFFFFFFL ) << 32 ) ) & 0xFFFFFFFF00000000L )\n"\
+"#define SET_LOWER( f64, low32 )    f64 ^= ( ( f64 ^ ( low32 & 0xFFFFFFFFL ) ) & 0xFFFFFFFFL )\n"\
+"#define SET_UPPER64( f64, up64 )   f64 ^= ( ( f64 ^ ( up64 & 0xFFFFFFFF00000000L ) ) & 0xFFFFFFFF00000000L )\n"\
+"#define SET_LOWER64( f64, low64 )  f64 ^= ( ( f64 ^ ( low64 & 0xFFFFFFFFL ) ) & 0xFFFFFFFFL )\n"\
+"#define GET_UPPER( f64 )           (int) ( f64 >>> 32 )\n"\
+"#define GET_LOWER( f64 )           (int) f64\n"
+fi
+
+if [[ $root == *Custom* ]]; then Custom=Custom; fi
+
+echo -e \
+\
+\
+"/* Generic definitions */\n"\
+\
+\
+"${Linked:+#define Linked}\n"\
+"${Custom:+#define Custom}\n"\
+"#define PACKAGE it.unimi.dsi.fastutil.${TYPE_LC2[$k]}s\n"\
+"#define VALUE_PACKAGE it.unimi.dsi.fastutil.${TYPE_LC2[$v]}s\n"\
+\
+\
+"/* Assertions (useful to generate conditional code) */\n"\
+\
+\
+$(if [[ "${CLASS[$k]}" != "" ]]; then\
+	echo "#unassert keyclass\\n#assert keyclass(${CLASS[$k]})\\n#unassert keys\\n";\
+	if [[ "${CLASS[$k]}" != "Object" && "${CLASS[$k]}" != "Reference" ]]; then\
+		echo "#assert keys(primitive)\\n";\
+	else\
+		echo "#assert keys(reference)\\n";\
+	fi;\
+ fi)\
+$(if [[ "${CLASS[$v]}" != "" ]]; then\
+	echo "#unassert valueclass\\n#assert valueclass(${CLASS[$v]})\\n#unassert values\\n";\
+	if [[ "${CLASS[$v]}" != "Object" && "${CLASS[$v]}" != "Reference" ]]; then\
+		echo "#assert values(primitive)\\n";\
+	else\
+		echo "#assert values(reference)\\n";\
+	fi;\
+ fi)\
+\
+\
+"/* Current type and class (and size, if applicable) */\n"\
+\
+\
+"#define KEY_TYPE ${TYPE[$k]}\n"\
+"#define VALUE_TYPE ${TYPE[$v]}\n"\
+"#define KEY_CLASS ${CLASS[$k]}\n"\
+"#define VALUE_CLASS ${CLASS[$v]}\n"\
+\
+\
+"#if #keyclass(Object) || #keyclass(Reference)\n"\
+"#define KEY_GENERIC_CLASS K\n"\
+"#define KEY_GENERIC_TYPE K\n"\
+"#define KEY_GENERIC <K>\n"\
+"#define KEY_GENERIC_WILDCARD <?>\n"\
+"#define KEY_EXTENDS_GENERIC <? extends K>\n"\
+"#define KEY_SUPER_GENERIC <? super K>\n"\
+"#define KEY_GENERIC_CAST (K)\n"\
+"#define KEY_GENERIC_ARRAY_CAST (K[])\n"\
+"#define KEY_GENERIC_BIG_ARRAY_CAST (K[][])\n"\
+"#else\n"\
+"#define KEY_GENERIC_CLASS KEY_CLASS\n"\
+"#define KEY_GENERIC_TYPE KEY_TYPE\n"\
+"#define KEY_GENERIC\n"\
+"#define KEY_GENERIC_WILDCARD\n"\
+"#define KEY_EXTENDS_GENERIC\n"\
+"#define KEY_SUPER_GENERIC\n"\
+"#define KEY_GENERIC_CAST\n"\
+"#define KEY_GENERIC_ARRAY_CAST\n"\
+"#define KEY_GENERIC_BIG_ARRAY_CAST\n"\
+"#endif\n"\
+\
+"#if #valueclass(Object) || #valueclass(Reference)\n"\
+"#define VALUE_GENERIC_CLASS V\n"\
+"#define VALUE_GENERIC_TYPE V\n"\
+"#define VALUE_GENERIC <V>\n"\
+"#define VALUE_EXTENDS_GENERIC <? extends V>\n"\
+"#define VALUE_GENERIC_CAST (V)\n"\
+"#define VALUE_GENERIC_ARRAY_CAST (V[])\n"\
+"#else\n"\
+"#define VALUE_GENERIC_CLASS VALUE_CLASS\n"\
+"#define VALUE_GENERIC_TYPE VALUE_TYPE\n"\
+"#define VALUE_GENERIC\n"\
+"#define VALUE_EXTENDS_GENERIC\n"\
+"#define VALUE_GENERIC_CAST\n"\
+"#define VALUE_GENERIC_ARRAY_CAST\n"\
+"#endif\n"\
+\
+"#if #keyclass(Object) || #keyclass(Reference)\n"\
+"#if #valueclass(Object) || #valueclass(Reference)\n"\
+"#define KEY_VALUE_GENERIC <K,V>\n"\
+"#define KEY_VALUE_EXTENDS_GENERIC <? extends K, ? extends V>\n"\
+"#else\n"\
+"#define KEY_VALUE_GENERIC <K>\n"\
+"#define KEY_VALUE_EXTENDS_GENERIC <? extends K>\n"\
+"#endif\n"\
+"#else\n"\
+"#if #valueclass(Object) || #valueclass(Reference)\n"\
+"#define KEY_VALUE_GENERIC <V>\n"\
+"#define KEY_VALUE_EXTENDS_GENERIC <? extends V>\n"\
+"#else\n"\
+"#define KEY_VALUE_GENERIC\n"\
+"#define KEY_VALUE_EXTENDS_GENERIC\n"\
+"#endif\n"\
+"#endif\n"\
+\
+\
+"/* Value methods */\n"\
+\
+\
+"#define KEY_VALUE ${TYPE[$k]}Value\n"\
+"#define VALUE_VALUE ${TYPE[$v]}Value\n"\
+\
+\
+"/* Interfaces (keys) */\n"\
+\
+\
+"#define COLLECTION ${TYPE_CAP[$k]}Collection\n\n"\
+"#define SET ${TYPE_CAP[$k]}Set\n\n"\
+"#define HASH ${TYPE_CAP[$k]}Hash\n\n"\
+"#define SORTED_SET ${TYPE_CAP[$k]}SortedSet\n\n"\
+"#define STD_SORTED_SET ${TYPE_STD[$k]}SortedSet\n\n"\
+"#define FUNCTION ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Function\n"\
+"#define MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Map\n"\
+"#define SORTED_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}SortedMap\n"\
+"#if #keyclass(Object) || #keyclass(Reference)\n"\
+"#define STD_SORTED_MAP SortedMap\n\n"\
+"#define STRATEGY Strategy\n\n"\
+"#else\n"\
+"#define STD_SORTED_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}SortedMap\n\n"\
+"#define STRATEGY PACKAGE.${TYPE_CAP[$k]}Hash.Strategy\n\n"\
+"#endif\n"\
+"#define LIST ${TYPE_CAP[$k]}List\n\n"\
+"#define BIG_LIST ${TYPE_CAP[$k]}BigList\n\n"\
+"#define STACK ${TYPE_STD[$k]}Stack\n\n"\
+"#define PRIORITY_QUEUE ${TYPE_STD[$k]}PriorityQueue\n\n"\
+"#define INDIRECT_PRIORITY_QUEUE ${TYPE_STD[$k]}IndirectPriorityQueue\n\n"\
+"#define INDIRECT_DOUBLE_PRIORITY_QUEUE ${TYPE_STD[$k]}IndirectDoublePriorityQueue\n\n"\
+"#define KEY_ITERATOR ${TYPE_CAP2[$k]}Iterator\n\n"\
+"#define KEY_ITERABLE ${TYPE_CAP2[$k]}Iterable\n\n"\
+"#define KEY_BIDI_ITERATOR ${TYPE_CAP2[$k]}BidirectionalIterator\n\n"\
+"#define KEY_LIST_ITERATOR ${TYPE_CAP2[$k]}ListIterator\n\n"\
+"#define KEY_BIG_LIST_ITERATOR ${TYPE_CAP2[$k]}BigListIterator\n\n"\
+"#define STD_KEY_ITERATOR ${TYPE_STD[$k]}Iterator\n\n"\
+"#define KEY_COMPARATOR ${TYPE_STD[$k]}Comparator\n\n"\
+\
+\
+"/* Interfaces (values) */\n"\
+\
+\
+"#define VALUE_COLLECTION ${TYPE_CAP[$v]}Collection\n\n"\
+"#define VALUE_ARRAY_SET ${TYPE_CAP[$v]}ArraySet\n\n"\
+"#define VALUE_ITERATOR ${TYPE_CAP2[$v]}Iterator\n\n"\
+"#define VALUE_LIST_ITERATOR ${TYPE_CAP2[$v]}ListIterator\n\n"\
+\
+\
+"/* Abstract implementations (keys) */\n"\
+\
+\
+"#define ABSTRACT_COLLECTION Abstract${TYPE_CAP[$k]}Collection\n\n"\
+"#define ABSTRACT_SET Abstract${TYPE_CAP[$k]}Set\n\n"\
+"#define ABSTRACT_SORTED_SET Abstract${TYPE_CAP[$k]}SortedSet\n"\
+"#define ABSTRACT_FUNCTION Abstract${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Function\n"\
+"#define ABSTRACT_MAP Abstract${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Map\n"\
+"#define ABSTRACT_FUNCTION Abstract${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Function\n"\
+"#define ABSTRACT_SORTED_MAP Abstract${TYPE_CAP[$k]}2${TYPE_CAP[$v]}SortedMap\n"\
+"#define ABSTRACT_LIST Abstract${TYPE_CAP[$k]}List\n\n"\
+"#define ABSTRACT_BIG_LIST Abstract${TYPE_CAP[$k]}BigList\n\n"\
+"#define SUBLIST ${TYPE_CAP[$k]}SubList\n\n"\
+"#define ABSTRACT_PRIORITY_QUEUE Abstract${TYPE_STD[$k]}PriorityQueue\n\n"\
+"#define ABSTRACT_STACK Abstract${TYPE_STD[$k]}Stack\n\n"\
+"#define KEY_ABSTRACT_ITERATOR Abstract${TYPE_CAP2[$k]}Iterator\n\n"\
+"#define KEY_ABSTRACT_BIDI_ITERATOR Abstract${TYPE_CAP2[$k]}BidirectionalIterator\n\n"\
+"#define KEY_ABSTRACT_LIST_ITERATOR Abstract${TYPE_CAP2[$k]}ListIterator\n\n"\
+"#define KEY_ABSTRACT_BIG_LIST_ITERATOR Abstract${TYPE_CAP2[$k]}BigListIterator\n\n"\
+"#if #keyclass(Object)\n"\
+"#define KEY_ABSTRACT_COMPARATOR Comparator\n\n"\
+"#else\n"\
+"#define KEY_ABSTRACT_COMPARATOR Abstract${TYPE_CAP[$k]}Comparator\n\n"\
+"#endif\n"\
+\
+\
+"/* Abstract implementations (values) */\n"\
+\
+\
+"#define VALUE_ABSTRACT_COLLECTION Abstract${TYPE_CAP[$v]}Collection\n\n"\
+"#define VALUE_ABSTRACT_ITERATOR Abstract${TYPE_CAP2[$v]}Iterator\n\n"\
+"#define VALUE_ABSTRACT_BIDI_ITERATOR Abstract${TYPE_CAP2[$v]}BidirectionalIterator\n\n"\
+\
+\
+"/* Static containers (keys) */\n"\
+\
+\
+"#define COLLECTIONS ${TYPE_CAP[$k]}Collections\n\n"\
+"#define SETS ${TYPE_CAP[$k]}Sets\n\n"\
+"#define SORTED_SETS ${TYPE_CAP[$k]}SortedSets\n\n"\
+"#define LISTS ${TYPE_CAP[$k]}Lists\n\n"\
+"#define BIG_LISTS ${TYPE_CAP[$k]}BigLists\n\n"\
+"#define MAPS ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Maps\n"\
+"#define FUNCTIONS ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Functions\n"\
+"#define SORTED_MAPS ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}SortedMaps\n"\
+"#define PRIORITY_QUEUES ${TYPE_CAP2[$k]}PriorityQueues\n\n"\
+"#define HEAPS ${TYPE_CAP2[$k]}Heaps\n\n"\
+"#define SEMI_INDIRECT_HEAPS ${TYPE_CAP2[$k]}SemiIndirectHeaps\n\n"\
+"#define INDIRECT_HEAPS ${TYPE_CAP2[$k]}IndirectHeaps\n\n"\
+"#define ARRAYS ${TYPE_CAP2[$k]}Arrays\n\n"\
+"#define BIG_ARRAYS ${TYPE_CAP2[$k]}BigArrays\n\n"\
+"#define ITERATORS ${TYPE_CAP2[$k]}Iterators\n\n"\
+"#define BIG_LIST_ITERATORS ${TYPE_CAP2[$k]}BigListIterators\n\n"\
+"#define COMPARATORS ${TYPE_CAP2[$k]}Comparators\n\n"\
+\
+\
+"/* Static containers (values) */\n"\
+\
+\
+"#define VALUE_COLLECTIONS ${TYPE_CAP[$v]}Collections\n\n"\
+"#define VALUE_SETS ${TYPE_CAP[$v]}Sets\n\n"\
+"#define VALUE_ARRAYS ${TYPE_CAP2[$v]}Arrays\n\n"\
+\
+\
+"/* Implementations */\n"\
+\
+\
+"#define OPEN_HASH_SET ${TYPE_CAP[$k]}${Linked}Open${Custom}HashSet\n\n"\
+"#define OPEN_HASH_BIG_SET ${TYPE_CAP[$k]}${Linked}Open${Custom}HashBigSet\n\n"\
+"#define OPEN_DOUBLE_HASH_SET ${TYPE_CAP[$k]}${Linked}Open${Custom}DoubleHashSet\n\n"\
+"#define OPEN_HASH_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}${Linked}Open${Custom}HashMap\n\n"\
+"#define STRIPED_OPEN_HASH_MAP Striped${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Open${Custom}HashMap\n\n"\
+"#define OPEN_DOUBLE_HASH_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}${Linked}Open${Custom}DoubleHashMap\n\n"\
+"#define ARRAY_SET ${TYPE_CAP[$k]}ArraySet\n\n"\
+"#define ARRAY_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}ArrayMap\n\n"\
+"#define LINKED_OPEN_HASH_SET ${TYPE_CAP[$k]}LinkedOpenHashSet\n\n"\
+"#define AVL_TREE_SET ${TYPE_CAP[$k]}AVLTreeSet\n\n"\
+"#define RB_TREE_SET ${TYPE_CAP[$k]}RBTreeSet\n\n"\
+"#define AVL_TREE_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}AVLTreeMap\n\n"\
+"#define RB_TREE_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}RBTreeMap\n\n"\
+"#define ARRAY_LIST ${TYPE_CAP[$k]}ArrayList\n\n"\
+"#define BIG_ARRAY_BIG_LIST ${TYPE_CAP[$k]}BigArrayBigList\n\n"\
+"#define ARRAY_FRONT_CODED_LIST ${TYPE_CAP[$k]}ArrayFrontCodedList\n\n"\
+"#define HEAP_PRIORITY_QUEUE ${TYPE_CAP2[$k]}HeapPriorityQueue\n\n"\
+"#define HEAP_SEMI_INDIRECT_PRIORITY_QUEUE ${TYPE_CAP2[$k]}HeapSemiIndirectPriorityQueue\n\n"\
+"#define HEAP_INDIRECT_PRIORITY_QUEUE ${TYPE_CAP2[$k]}HeapIndirectPriorityQueue\n\n"\
+"#define HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE ${TYPE_CAP2[$k]}HeapSesquiIndirectDoublePriorityQueue\n\n"\
+"#define HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUE ${TYPE_CAP2[$k]}HeapIndirectDoublePriorityQueue\n\n"\
+"#define ARRAY_FIFO_QUEUE ${TYPE_CAP2[$k]}ArrayFIFOQueue\n\n"\
+"#define ARRAY_PRIORITY_QUEUE ${TYPE_CAP2[$k]}ArrayPriorityQueue\n\n"\
+"#define ARRAY_INDIRECT_PRIORITY_QUEUE ${TYPE_CAP2[$k]}ArrayIndirectPriorityQueue\n\n"\
+"#define ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUE ${TYPE_CAP2[$k]}ArrayIndirectDoublePriorityQueue\n\n"\
+\
+\
+"/* Synchronized wrappers */\n"\
+\
+\
+"#define SYNCHRONIZED_COLLECTION Synchronized${TYPE_CAP[$k]}Collection\n\n"\
+"#define SYNCHRONIZED_SET Synchronized${TYPE_CAP[$k]}Set\n\n"\
+"#define SYNCHRONIZED_SORTED_SET Synchronized${TYPE_CAP[$k]}SortedSet\n\n"\
+"#define SYNCHRONIZED_FUNCTION Synchronized${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Function\n\n"\
+"#define SYNCHRONIZED_MAP Synchronized${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Map\n\n"\
+"#define SYNCHRONIZED_LIST Synchronized${TYPE_CAP[$k]}List\n\n"\
+\
+\
+"/* Unmodifiable wrappers */\n"\
+\
+\
+"#define UNMODIFIABLE_COLLECTION Unmodifiable${TYPE_CAP[$k]}Collection\n\n"\
+"#define UNMODIFIABLE_SET Unmodifiable${TYPE_CAP[$k]}Set\n\n"\
+"#define UNMODIFIABLE_SORTED_SET Unmodifiable${TYPE_CAP[$k]}SortedSet\n\n"\
+"#define UNMODIFIABLE_FUNCTION Unmodifiable${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Function\n\n"\
+"#define UNMODIFIABLE_MAP Unmodifiable${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Map\n\n"\
+"#define UNMODIFIABLE_LIST Unmodifiable${TYPE_CAP[$k]}List\n\n"\
+"#define UNMODIFIABLE_KEY_ITERATOR Unmodifiable${TYPE_CAP[$k]}Iterator\n\n"\
+"#define UNMODIFIABLE_KEY_BIDI_ITERATOR Unmodifiable${TYPE_CAP[$k]}BidirectionalIterator\n\n"\
+"#define UNMODIFIABLE_KEY_LIST_ITERATOR Unmodifiable${TYPE_CAP[$k]}ListIterator\n\n"\
+\
+\
+"/* Other wrappers */\n"\
+\
+\
+"#define KEY_READER_WRAPPER ${TYPE_CAP[$k]}ReaderWrapper\n\n"\
+"#define KEY_DATA_INPUT_WRAPPER ${TYPE_CAP[$k]}DataInputWrapper\n\n"\
+\
+\
+"/* Methods (keys) */\n"\
+\
+\
+"#define NEXT_KEY next${TYPE_STD[$k]}\n"\
+"#define PREV_KEY previous${TYPE_STD[$k]}\n"\
+"#define FIRST_KEY first${TYPE_STD[$k]}Key\n"\
+"#define LAST_KEY last${TYPE_STD[$k]}Key\n"\
+"#define GET_KEY get${TYPE_STD[$k]}\n"\
+"#define REMOVE_KEY remove${TYPE_STD[$k]}\n"\
+"#define READ_KEY read${TYPE_CAP2[$k]}\n"\
+"#define WRITE_KEY write${TYPE_CAP2[$k]}\n"\
+"#define DEQUEUE dequeue${TYPE_STD[$k]}\n"\
+"#define DEQUEUE_LAST dequeueLast${TYPE_STD[$k]}\n"\
+"#define SUBLIST_METHOD ${TYPE_LC[$k]}SubList\n"\
+"#define SINGLETON_METHOD ${TYPE_LC[$k]}Singleton\n\n"\
+"#define FIRST first${TYPE_STD[$k]}\n"\
+"#define LAST last${TYPE_STD[$k]}\n"\
+"#define TOP top${TYPE_STD[$k]}\n"\
+"#define PEEK peek${TYPE_STD[$k]}\n"\
+"#define POP pop${TYPE_STD[$k]}\n"\
+"#define KEY_ITERATOR_METHOD ${TYPE_LC2[$k]}Iterator\n\n"\
+"#define KEY_LIST_ITERATOR_METHOD ${TYPE_LC2[$k]}ListIterator\n\n"\
+"#define KEY_EMPTY_ITERATOR_METHOD empty${TYPE_CAP2[$k]}Iterator\n\n"\
+"#define AS_KEY_ITERATOR as${TYPE_CAP2[$k]}Iterator\n\n"\
+"#define TO_KEY_ARRAY to${TYPE_STD[$k]}Array\n"\
+"#define ENTRY_GET_KEY get${TYPE_STD[$k]}Key\n"\
+"#define REMOVE_FIRST_KEY removeFirst${TYPE_STD[$k]}\n"\
+"#define REMOVE_LAST_KEY removeLast${TYPE_STD[$k]}\n"\
+"#define PARSE_KEY parse${TYPE_STD[$k]}\n"\
+"#define LOAD_KEYS load${TYPE_STD[$k]}s\n"\
+"#define LOAD_KEYS_BIG load${TYPE_STD[$k]}sBig\n"\
+"#define STORE_KEYS store${TYPE_STD[$k]}s\n"\
+\
+\
+"/* Methods (values) */\n"\
+\
+\
+"#define NEXT_VALUE next${TYPE_STD[$v]}\n"\
+"#define PREV_VALUE previous${TYPE_STD[$v]}\n"\
+"#define READ_VALUE read${TYPE_CAP2[$v]}\n"\
+"#define WRITE_VALUE write${TYPE_CAP2[$v]}\n"\
+"#define VALUE_ITERATOR_METHOD ${TYPE_LC2[$v]}Iterator\n\n"\
+"#define ENTRY_GET_VALUE get${TYPE_STD[$v]}Value\n"\
+"#define REMOVE_FIRST_VALUE removeFirst${TYPE_STD[$v]}\n"\
+"#define REMOVE_LAST_VALUE removeLast${TYPE_STD[$v]}\n"\
+\
+\
+"/* Methods (keys/values) */\n"\
+\
+\
+"#define ENTRYSET ${TYPE_LC[$k]}2${TYPE_CAP[$v]}EntrySet\n"\
+\
+\
+"/* Methods that have special names depending on keys (but the special names depend on values) */\n"\
+\
+\
+"#if #keyclass(Object) || #keyclass(Reference)\n"\
+"#define GET_VALUE get${TYPE_STD[$v]}\n"\
+"#define REMOVE_VALUE remove${TYPE_STD[$v]}\n"\
+"#else\n"\
+"#define GET_VALUE get\n"\
+"#define REMOVE_VALUE remove\n"\
+"#endif\n"\
+\
+\
+\
+"/* Equality */\n"\
+\
+\
+\
+"#ifdef Custom\n"\
+"#define KEY_EQUALS(x,y) ( strategy.equals( (x), " KEY_GENERIC_CAST "(y) ) )\n"\
+"#else\n"\
+"#if #keyclass(Object)\n"\
+"#define KEY_EQUALS(x,y) ( (x) == null ? (y) == null : (x).equals(y) )\n"\
+"#define KEY_EQUALS_NOT_NULL(x,y) ( (x).equals(y) )\n"\
+"#else\n"\
+"#define KEY_EQUALS(x,y) ( (x) == (y) )\n"\
+"#define KEY_EQUALS_NOT_NULL(x,y) ( (x) == (y) )\n"\
+"#endif\n"\
+"#endif\n\n"\
+\
+"#if #valueclass(Object)\n"\
+"#define VALUE_EQUALS(x,y) ( (x) == null ? (y) == null : (x).equals(y) )\n"\
+"#else\n"\
+"#define VALUE_EQUALS(x,y) ( (x) == (y) )\n"\
+"#endif\n\n"\
+\
+\
+\
+"/* Object/Reference-only definitions (keys) */\n"\
+\
+\
+"#if #keyclass(Object) || #keyclass(Reference)\n"\
+\
+"#define REMOVE remove\n"\
+\
+"#define KEY_OBJ2TYPE(x) (x)\n"\
+"#define KEY_CLASS2TYPE(x) (x)\n"\
+"#define KEY2OBJ(x) (x)\n"\
+\
+"#if #keyclass(Object)\n"\
+"#ifdef Custom\n"\
+"#define KEY2JAVAHASH(x) ( strategy.hashCode(" KEY_GENERIC_CAST "(x)) )\n"\
+"#define KEY2INTHASH(x) ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(" KEY_GENERIC_CAST "(x)) ) )\n"\
+"#define KEY2LONGHASH(x) ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)strategy.hashCode(" KEY_GENERIC_CAST "(x)) ) )\n"\
+"#else\n"\
+"#define KEY2JAVAHASH(x) ( (x) == null ? 0 : (x).hashCode() )\n"\
+"#define KEY2INTHASH(x) ( (x) == null ? 0x87fcd5c : it.unimi.dsi.fastutil.HashCommon.murmurHash3( (x).hashCode() ) )\n"\
+"#define KEY2LONGHASH(x) ( (x) == null ? 0x810879608e4259ccL : it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)(x).hashCode() ) )\n"\
+"#endif\n"\
+"#else\n"\
+"#define KEY2JAVAHASH(x) ( (x) == null ? 0 : System.identityHashCode(x) )\n"\
+"#define KEY2INTHASH(x) ( (x) == null ? 0x87fcd5c : it.unimi.dsi.fastutil.HashCommon.murmurHash3( System.identityHashCode(x) ) )\n"\
+"#define KEY2LONGHASH(x) ( (x) == null ? 0x810879608e4259ccL : it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)System.identityHashCode(x) ) )\n"\
+"#endif\n"\
+\
+"#define KEY_CMP(x,y) ( ((Comparable<KEY_GENERIC_CLASS>)(x)).compareTo(y) )\n"\
+"#define KEY_CMP_EQ(x,y) ( ((Comparable<KEY_GENERIC_CLASS>)(x)).compareTo(y) == 0 )\n"\
+"#define KEY_LESS(x,y) ( ((Comparable<KEY_GENERIC_CLASS>)(x)).compareTo(y) < 0 )\n"\
+"#define KEY_LESSEQ(x,y) ( ((Comparable<KEY_GENERIC_CLASS>)(x)).compareTo(y) <= 0 )\n"\
+\
+"#define KEY_NULL (null)\n"\
+\
+\
+"#else\n"\
+\
+\
+"/* Primitive-type-only definitions (keys) */\n"\
+\
+\
+"#define REMOVE rem\n"\
+\
+"#define KEY_CLASS2TYPE(x) ((x).KEY_VALUE())\n"\
+"#define KEY_OBJ2TYPE(x) (KEY_CLASS2TYPE((KEY_CLASS)(x)))\n"\
+"#define KEY2OBJ(x) (KEY_CLASS.valueOf(x))\n"\
+\
+"#if #keyclass(Boolean)\n"\
+"#define KEY_CMP_EQ(x,y) ( (x) == (y) )\n"\
+"#define KEY_NULL (false)\n"\
+"#define KEY_CMP(x,y) ( !(x) && (y) ? -1 : ( (x) == (y) ? 0 : 1 ) )\n"\
+"#define KEY_LESS(x,y) ( !(x) && (y) )\n"\
+"#define KEY_LESSEQ(x,y) ( !(x) || (y) )\n"\
+"#else\n"\
+"#define KEY_NULL ((KEY_TYPE)0)\n"\
+"#if #keyclass(Float) || #keyclass(Double)\n"\
+"#define KEY_CMP_EQ(x,y) ( KEY_CLASS.compare((x),(y)) == 0 )\n"\
+"#define KEY_CMP(x,y) ( KEY_CLASS.compare((x),(y)) )\n"\
+"#define KEY_LESS(x,y) ( KEY_CLASS.compare((x),(y)) < 0 )\n"\
+"#define KEY_LESSEQ(x,y) ( KEY_CLASS.compare((x),(y)) <= 0 )\n"\
+"#else\n"\
+"#define KEY_CMP_EQ(x,y) ( (x) == (y) )\n"\
+"#define KEY_CMP(x,y) ( (x) < (y) ? -1 : ( (x) == (y) ? 0 : 1 ) )\n"\
+"#define KEY_LESS(x,y) ( (x) < (y) )\n"\
+"#define KEY_LESSEQ(x,y) ( (x) <= (y) )\n"\
+"#endif\n"\
+\
+"#if #keyclass(Float)\n"\
+"#define KEY2LEXINT(x) fixFloat(x)\n"\
+"#elif #keyclass(Double)\n"\
+"#define KEY2LEXINT(x) fixDouble(x)\n"\
+"#else\n"\
+"#define KEY2LEXINT(x) (x)\n"\
+"#endif\n"\
+\
+"#endif\n"\
+\
+"#ifdef Custom\n"\
+"#define KEY2JAVAHASH(x) ( strategy.hashCode(x) )\n"\
+"#define KEY2INTHASH(x) ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(x) ) )\n"\
+"#define KEY2LONGHASH(x) ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)strategy.hashCode(x) ) )\n"\
+"#else\n"\
+\
+"#if #keyclass(Float)\n"\
+"#define KEY2JAVAHASH(x) it.unimi.dsi.fastutil.HashCommon.float2int(x)\n"\
+"#define KEY2INTHASH(x) it.unimi.dsi.fastutil.HashCommon.murmurHash3( it.unimi.dsi.fastutil.HashCommon.float2int(x) )\n"\
+"#define KEY2LONGHASH(x) it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)it.unimi.dsi.fastutil.HashCommon.float2int(x) )\n"\
+"#elif #keyclass(Double)\n"\
+"#define KEY2JAVAHASH(x) it.unimi.dsi.fastutil.HashCommon.double2int(x)\n"\
+"#define KEY2INTHASH(x) (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits(x))\n"\
+"#define KEY2LONGHASH(x) it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits(x))\n"\
+"#elif #keyclass(Long)\n"\
+"#define KEY2JAVAHASH(x) it.unimi.dsi.fastutil.HashCommon.long2int(x)\n"\
+"#define KEY2INTHASH(x) (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(x)\n"\
+"#define KEY2LONGHASH(x) it.unimi.dsi.fastutil.HashCommon.murmurHash3(x)\n"\
+"#elif #keyclass(Boolean)\n"\
+"#define KEY2JAVAHASH(x) ((x) ? 1231 : 1237)\n"\
+"#define KEY2INTHASH(x) ((x) ? 0xfab5368 : 0xcba05e7b)\n"\
+"#define KEY2LONGHASH(x) ((x) ? 0x74a19fc8b6428188L : 0xbaeca2031a4fd9ecL)\n"\
+"#else\n"\
+"#define KEY2JAVAHASH(x) (x)\n"\
+"#define KEY2INTHASH(x) ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( (x) ) )\n"\
+"#define KEY2LONGHASH(x) ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)(x) ) )\n"\
+"#endif\n"\
+"#endif\n"\
+\
+"#endif\n"\
+\
+\
+\
+"/* Object/Reference-only definitions (values) */\n"\
+\
+\
+"#if #valueclass(Object) || #valueclass(Reference)\n"\
+"#define VALUE_OBJ2TYPE(x) (x)\n"\
+"#define VALUE_CLASS2TYPE(x) (x)\n"\
+"#define VALUE2OBJ(x) (x)\n"\
+\
+"#if #valueclass(Object)\n"\
+"#define VALUE2JAVAHASH(x) ( (x) == null ? 0 : (x).hashCode() )\n"\
+"#else\n"\
+"#define VALUE2JAVAHASH(x) ( (x) == null ? 0 : System.identityHashCode(x) )\n"\
+"#endif\n"\
+\
+"#define VALUE_NULL (null)\n"\
+"#define OBJECT_DEFAULT_RETURN_VALUE (this.defRetValue)\n"\
+\
+"#else\n"\
+\
+\
+"/* Primitive-type-only definitions (values) */\n"\
+\
+\
+"#define VALUE_CLASS2TYPE(x) ((x).VALUE_VALUE())\n"\
+"#define VALUE_OBJ2TYPE(x) (VALUE_CLASS2TYPE((VALUE_CLASS)(x)))\n"\
+"#define VALUE2OBJ(x) (VALUE_CLASS.valueOf(x))\n"\
+\
+"#if #valueclass(Float) || #valueclass(Double) || #valueclass(Long)\n"\
+"#define VALUE_NULL (0)\n"\
+"#define VALUE2JAVAHASH(x) it.unimi.dsi.fastutil.HashCommon.${TYPE[$v]}2int(x)\n"\
+"#elif #valueclass(Boolean)\n"\
+"#define VALUE_NULL (false)\n"\
+"#define VALUE2JAVAHASH(x) (x ? 1231 : 1237)\n"\
+"#else\n"\
+"#if #valueclass(Integer)\n"\
+"#define VALUE_NULL (0)\n"\
+"#else\n"\
+"#define VALUE_NULL ((VALUE_TYPE)0)\n"\
+"#endif\n"\
+"#define VALUE2JAVAHASH(x) (x)\n"\
+"#endif\n"\
+\
+"#define OBJECT_DEFAULT_RETURN_VALUE (null)\n"\
+\
+"#endif\n"\
+\
+"#include \"$1\"\n"
diff --git a/makefile b/makefile
new file mode 100644
index 0000000..ced451a
--- /dev/null
+++ b/makefile
@@ -0,0 +1,666 @@
+include build.properties
+
+TAR=tar
+PKG_PATH = it/unimi/dsi/fastutil
+SOURCEDIR = src/$(PKG_PATH)
+GEN_SRCDIR ?= src
+export GEN_SRCDIR
+DOCSDIR = docs
+
+APIURL=http://java.sun.com/j2se/5.0/docs/api # External URLs in the docs will point here
+
+.SUFFIXES: .java .j
+
+.PHONY: all clean depend install docs jar tar jsources csources dirs
+
+.SECONDARY: $(JSOURCES)
+
+#  The capitalized types used to build class and method names; boolean and object types are not listed.
+TYPE_NOBOOL_NOOBJ=Byte Short Int Long Char Float Double
+
+#  The capitalized types used to build class and method names; boolean and reference are not listed.
+TYPE_NOBOOL_NOREF=$(TYPE_NOBOOL_NOOBJ) Object
+
+#  The capitalized types used to build class and method names; object types are not listed.
+TYPE_NOOBJ=Boolean $(TYPE_NOBOOL_NOOBJ)
+
+#  The capitalized types used to build class and method names; references are not listed.
+TYPE_NOREF=$(TYPE_NOOBJ) Object
+
+#  The capitalized types used to build class and method names; boolean is not listed.
+TYPE_NOBOOL=$(TYPE_NOBOOL_NOREF) Reference
+
+# The capitalized types used to build class and method names; now references appear as Reference.
+TYPE=$(TYPE_NOREF) Reference
+
+#  The capitalized types used to build class and method names; only types for which big structures are built are listed.
+TYPE_BIG=Int Long Float Double Object Reference
+
+
+# These variables are used as an associative array (using computed names).
+PACKAGE_Boolean = booleans
+PACKAGE_Byte = bytes
+PACKAGE_Short = shorts
+PACKAGE_Int = ints
+PACKAGE_Long = longs
+PACKAGE_Char = chars
+PACKAGE_Float = floats
+PACKAGE_Double = doubles
+PACKAGE_Object = objects
+PACKAGE_Reference = objects
+
+explain:
+	@echo -e "\nTo build fastutil, you must first use \"make sources\""
+	@echo -e "to obtain the actual Java files. Then, you can build the jar"
+	@echo -e "file using \"ant jar\", or the documentation using \"ant javadoc\".\n"
+	@echo -e "If you set the make variable TEST (e.g., make sources TEST=1), you"
+	@echo -e "will compile behavioral and speed tests into the classes.\n"
+	@echo -e "If you set the make variable ASSERTS (e.g., make sources ASSERTS=1), you"
+	@echo -e "will compile assertions into the classes.\n\n"
+
+source:
+	-rm -f fastutil-$(version)
+	ln -s . fastutil-$(version)
+	$(TAR) zcvf fastutil-$(version)-src.tar.gz --owner=0 --group=0 \
+		fastutil-$(version)/drv/*.drv \
+		fastutil-$(version)/build.xml \
+		fastutil-$(version)/pom.xml \
+		fastutil-$(version)/build.properties \
+		fastutil-$(version)/gencsource.sh \
+		fastutil-$(version)/CHANGES \
+		fastutil-$(version)/README \
+		fastutil-$(version)/LICENSE-2.0 \
+		fastutil-$(version)/makefile \
+		$(foreach f, $(SOURCES), fastutil-$(version)/$(f)) \
+		fastutil-$(version)/$(SOURCEDIR)/{boolean,byte,char,short,int,long,float,double,object}s/package.html \
+		fastutil-$(version)/$(SOURCEDIR)/io/package.html \
+		fastutil-$(version)/src/overview.html \
+		$$(find fastutil-$(version)/test -iname \*.java)
+	rm fastutil-$(version)
+
+binary:
+	make -s clean sources
+	ant clean jar javadoc
+	-rm -f fastutil-$(version)
+	ln -s . fastutil-$(version)
+	$(TAR) zcvf fastutil-$(version)-bin.tar.gz --owner=0 --group=0 \
+		fastutil-$(version)/CHANGES \
+		fastutil-$(version)/README \
+		fastutil-$(version)/LICENSE-2.0 \
+		fastutil-$(version)/docs \
+		fastutil-$(version)/fastutil-$(version).jar
+	rm fastutil-$(version)
+
+stage:
+	(sed -e s/VERSION/$$(grep version build.properties | cut -d= -f2)/ <pom-model.xml >pom.xml)
+	(unset LOCAL_IVY_SETTINGS; ant stage)
+
+
+dirs:
+	mkdir -p $(GEN_SRCDIR)/$(PKG_PATH)
+	mkdir -p $(GEN_SRCDIR)/$(PKG_PATH)/io
+	mkdir -p $(foreach k, $(sort $(TYPE)), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k)))
+
+#
+# Interfaces
+#
+
+ITERABLES := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Iterable.c)
+$(ITERABLES): drv/Iterable.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ITERABLES)
+
+COLLECTIONS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Collection.c)
+$(COLLECTIONS): drv/Collection.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(COLLECTIONS)
+
+SETS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Set.c)
+$(SETS): drv/Set.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(SETS)
+
+HASHES := $(foreach k,$(TYPE_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Hash.c)
+$(HASHES): drv/Hash.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(HASHES)
+
+SORTED_SETS := $(foreach k,$(TYPE_NOBOOL), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)SortedSet.c)
+$(SORTED_SETS): drv/SortedSet.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(SORTED_SETS)
+
+FUNCTIONS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)Function.c))
+$(FUNCTIONS): drv/Function.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(FUNCTIONS)
+
+MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)Map.c))
+$(MAPS): drv/Map.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(MAPS)
+
+SORTED_MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)SortedMap.c))
+$(SORTED_MAPS): drv/SortedMap.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(SORTED_MAPS)
+
+LISTS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)List.c)
+$(LISTS): drv/List.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(LISTS)
+
+STACKS := $(foreach k,$(TYPE_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Stack.c)
+$(STACKS): drv/Stack.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(STACKS)
+
+PRIORITY_QUEUES := $(foreach k,$(TYPE_NOBOOL_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)PriorityQueue.c)
+$(PRIORITY_QUEUES): drv/PriorityQueue.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(PRIORITY_QUEUES)
+
+INDIRECT_PRIORITY_QUEUES := $(foreach k,$(TYPE_NOBOOL_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)IndirectPriorityQueue.c)
+$(INDIRECT_PRIORITY_QUEUES): drv/IndirectPriorityQueue.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(INDIRECT_PRIORITY_QUEUES)
+
+COMPARATORS := $(foreach k,$(TYPE_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Comparator.c)
+$(COMPARATORS): drv/Comparator.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(COMPARATORS)
+
+ITERATORS := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Iterator.c)
+$(ITERATORS): drv/Iterator.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ITERATORS)
+
+BIDIRECTIONAL_ITERATORS := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)BidirectionalIterator.c)
+$(BIDIRECTIONAL_ITERATORS): drv/BidirectionalIterator.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(BIDIRECTIONAL_ITERATORS)
+
+LIST_ITERATORS := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)ListIterator.c)
+$(LIST_ITERATORS): drv/ListIterator.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(LIST_ITERATORS)
+
+BIG_LISTS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)BigList.c)
+$(BIG_LISTS): drv/BigList.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(BIG_LISTS)
+
+BIG_LIST_ITERATORS := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)BigListIterator.c)
+$(BIG_LIST_ITERATORS): drv/BigListIterator.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(BIG_LIST_ITERATORS)
+
+#
+# Abstract implementations
+#
+
+ABSTRACT_COLLECTIONS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)Collection.c)
+$(ABSTRACT_COLLECTIONS): drv/AbstractCollection.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ABSTRACT_COLLECTIONS)
+
+ABSTRACT_SETS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)Set.c)
+$(ABSTRACT_SETS): drv/AbstractSet.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ABSTRACT_SETS)
+
+ABSTRACT_SORTED_SETS := $(foreach k,$(TYPE_NOBOOL), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)SortedSet.c)
+$(ABSTRACT_SORTED_SETS): drv/AbstractSortedSet.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ABSTRACT_SORTED_SETS)
+
+ABSTRACT_FUNCTIONS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)2$(v)Function.c))
+$(ABSTRACT_FUNCTIONS): drv/AbstractFunction.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ABSTRACT_FUNCTIONS)
+
+ABSTRACT_MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)2$(v)Map.c))
+$(ABSTRACT_MAPS): drv/AbstractMap.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ABSTRACT_MAPS)
+
+ABSTRACT_SORTED_MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)2$(v)SortedMap.c))
+$(ABSTRACT_SORTED_MAPS): drv/AbstractSortedMap.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ABSTRACT_SORTED_MAPS)
+
+ABSTRACT_LISTS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)List.c)
+$(ABSTRACT_LISTS): drv/AbstractList.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ABSTRACT_LISTS)
+
+ABSTRACT_BIG_LISTS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)BigList.c)
+$(ABSTRACT_BIG_LISTS): drv/AbstractBigList.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ABSTRACT_BIG_LISTS)
+
+ABSTRACT_STACKS := $(foreach k,$(TYPE_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)Stack.c)
+$(ABSTRACT_STACKS): drv/AbstractStack.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ABSTRACT_STACKS)
+
+ABSTRACT_PRIORITY_QUEUES := $(foreach k,$(TYPE_NOBOOL_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)PriorityQueue.c)
+$(ABSTRACT_PRIORITY_QUEUES): drv/AbstractPriorityQueue.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ABSTRACT_PRIORITY_QUEUES)
+
+ABSTRACT_COMPARATORS := $(foreach k,$(TYPE_NOBOOL_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)Comparator.c)
+$(ABSTRACT_COMPARATORS): drv/AbstractComparator.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ABSTRACT_COMPARATORS)
+
+ABSTRACT_ITERATORS := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)Iterator.c)
+$(ABSTRACT_ITERATORS): drv/AbstractIterator.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ABSTRACT_ITERATORS)
+
+ABSTRACT_BIDIRECTIONAL_ITERATORS := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)BidirectionalIterator.c)
+$(ABSTRACT_BIDIRECTIONAL_ITERATORS): drv/AbstractBidirectionalIterator.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ABSTRACT_BIDIRECTIONAL_ITERATORS)
+
+ABSTRACT_LIST_ITERATORS := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)ListIterator.c)
+$(ABSTRACT_LIST_ITERATORS): drv/AbstractListIterator.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ABSTRACT_LIST_ITERATORS)
+
+ABSTRACT_BIG_LIST_ITERATORS := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Abstract$(k)BigListIterator.c)
+$(ABSTRACT_BIG_LIST_ITERATORS): drv/AbstractBigListIterator.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ABSTRACT_BIG_LIST_ITERATORS)
+
+#
+# Concrete implementations
+#
+
+OPEN_HASH_SETS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)OpenHashSet.c)
+$(OPEN_HASH_SETS): drv/OpenHashSet.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(OPEN_HASH_SETS)
+
+OPEN_HASH_BIG_SETS := $(foreach k,$(TYPE_BIG), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)OpenHashBigSet.c)
+$(OPEN_HASH_BIG_SETS): drv/OpenHashBigSet.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(OPEN_HASH_BIG_SETS)
+
+LINKED_OPEN_HASH_SETS := $(foreach k,$(TYPE_NOBOOL), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)LinkedOpenHashSet.c)
+$(LINKED_OPEN_HASH_SETS): drv/LinkedOpenHashSet.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(LINKED_OPEN_HASH_SETS)
+
+OPEN_CUSTOM_HASH_SETS := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)OpenCustomHashSet.c)
+$(OPEN_CUSTOM_HASH_SETS): drv/OpenCustomHashSet.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(OPEN_CUSTOM_HASH_SETS)
+
+LINKED_OPEN_CUSTOM_HASH_SETS := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)LinkedOpenCustomHashSet.c)
+$(LINKED_OPEN_CUSTOM_HASH_SETS): drv/LinkedOpenCustomHashSet.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(LINKED_OPEN_CUSTOM_HASH_SETS)
+
+ARRAY_SETS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)ArraySet.c)
+$(ARRAY_SETS): drv/ArraySet.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ARRAY_SETS)
+
+AVL_TREE_SETS := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)AVLTreeSet.c)
+$(AVL_TREE_SETS): drv/AVLTreeSet.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(AVL_TREE_SETS)
+
+RB_TREE_SETS := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)RBTreeSet.c)
+$(RB_TREE_SETS): drv/RBTreeSet.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(RB_TREE_SETS)
+
+OPEN_HASH_MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)OpenHashMap.c))
+$(OPEN_HASH_MAPS): drv/OpenHashMap.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(OPEN_HASH_MAPS)
+
+LINKED_OPEN_HASH_MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)LinkedOpenHashMap.c))
+$(LINKED_OPEN_HASH_MAPS): drv/LinkedOpenHashMap.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(LINKED_OPEN_HASH_MAPS)
+
+OPEN_CUSTOM_HASH_MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)OpenCustomHashMap.c))
+$(OPEN_CUSTOM_HASH_MAPS): drv/OpenCustomHashMap.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(OPEN_CUSTOM_HASH_MAPS)
+
+LINKED_OPEN_CUSTOM_HASH_MAPS := $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/objects/Object2$(v)LinkedOpenCustomHashMap.c)
+$(LINKED_OPEN_CUSTOM_HASH_MAPS): drv/LinkedOpenCustomHashMap.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(LINKED_OPEN_CUSTOM_HASH_MAPS)
+
+#STRIPED_OPEN_HASH_MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/Striped$(k)2$(v)OpenHashMap.c))
+#$(STRIPED_OPEN_HASH_MAPS): drv/StripedOpenHashMap.drv; ./gencsource.sh $< $@ >$@
+
+#CSOURCES += $(STRIPED_OPEN_HASH_MAPS)
+
+ARRAY_MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)ArrayMap.c))
+$(ARRAY_MAPS): drv/ArrayMap.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ARRAY_MAPS)
+
+AVL_TREE_MAPS := $(foreach k,$(TYPE_NOBOOL_NOREF), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)AVLTreeMap.c))
+$(AVL_TREE_MAPS): drv/AVLTreeMap.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(AVL_TREE_MAPS)
+
+RB_TREE_MAPS := $(foreach k,$(TYPE_NOBOOL_NOREF), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)RBTreeMap.c))
+$(RB_TREE_MAPS): drv/RBTreeMap.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(RB_TREE_MAPS)
+
+ARRAY_LISTS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)ArrayList.c)
+$(ARRAY_LISTS): drv/ArrayList.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ARRAY_LISTS)
+
+BIG_ARRAY_BIG_LISTS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)BigArrayBigList.c)
+$(BIG_ARRAY_BIG_LISTS): drv/BigArrayBigList.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(BIG_ARRAY_BIG_LISTS)
+
+FRONT_CODED_LISTS := $(foreach k, Byte Short Int Long Char, $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)ArrayFrontCodedList.c)
+$(FRONT_CODED_LISTS): drv/ArrayFrontCodedList.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(FRONT_CODED_LISTS)
+
+HEAP_PRIORITY_QUEUES := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)HeapPriorityQueue.c)
+$(HEAP_PRIORITY_QUEUES): drv/HeapPriorityQueue.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(HEAP_PRIORITY_QUEUES)
+
+ARRAY_PRIORITY_QUEUES := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)ArrayPriorityQueue.c)
+$(ARRAY_PRIORITY_QUEUES): drv/ArrayPriorityQueue.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ARRAY_PRIORITY_QUEUES)
+
+ARRAY_FIFO_QUEUES := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)ArrayFIFOQueue.c)
+$(ARRAY_FIFO_QUEUES): drv/ArrayFIFOQueue.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ARRAY_FIFO_QUEUES)
+
+HEAP_SEMI_INDIRECT_PRIORITY_QUEUES := $(foreach k, $(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)HeapSemiIndirectPriorityQueue.c)
+$(HEAP_SEMI_INDIRECT_PRIORITY_QUEUES): drv/HeapSemiIndirectPriorityQueue.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(HEAP_SEMI_INDIRECT_PRIORITY_QUEUES)
+
+HEAP_INDIRECT_PRIORITY_QUEUES := $(foreach k, $(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)HeapIndirectPriorityQueue.c)
+$(HEAP_INDIRECT_PRIORITY_QUEUES): drv/HeapIndirectPriorityQueue.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(HEAP_INDIRECT_PRIORITY_QUEUES)
+
+ARRAY_INDIRECT_PRIORITY_QUEUES := $(foreach k, $(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)ArrayIndirectPriorityQueue.c)
+$(ARRAY_INDIRECT_PRIORITY_QUEUES): drv/ArrayIndirectPriorityQueue.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ARRAY_INDIRECT_PRIORITY_QUEUES)
+
+
+#
+# Static containers
+#
+
+ITERATORS_STATIC := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Iterators.c)
+$(ITERATORS_STATIC): drv/Iterators.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ITERATORS_STATIC)
+
+
+BIG_LIST_ITERATORS_STATIC := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)BigListIterators.c)
+$(BIG_LIST_ITERATORS_STATIC): drv/BigListIterators.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(BIG_LIST_ITERATORS_STATIC)
+
+
+COLLECTIONS_STATIC := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Collections.c)
+$(COLLECTIONS_STATIC): drv/Collections.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(COLLECTIONS_STATIC)
+
+
+SETS_STATIC := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Sets.c)
+$(SETS_STATIC): drv/Sets.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(SETS_STATIC)
+
+
+SORTED_SETS_STATIC := $(foreach k,$(TYPE_NOBOOL), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)SortedSets.c)
+$(SORTED_SETS_STATIC): drv/SortedSets.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(SORTED_SETS_STATIC)
+
+
+LISTS_STATIC := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Lists.c)
+$(LISTS_STATIC): drv/Lists.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(LISTS_STATIC)
+
+
+BIG_LISTS_STATIC := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)BigLists.c)
+$(BIG_LISTS_STATIC): drv/BigLists.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(BIG_LISTS_STATIC)
+
+
+ARRAYS_STATIC := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Arrays.c)
+$(ARRAYS_STATIC): drv/Arrays.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(ARRAYS_STATIC)
+
+
+BIG_ARRAYS_STATIC := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)BigArrays.c)
+$(BIG_ARRAYS_STATIC): drv/BigArrays.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(BIG_ARRAYS_STATIC)
+
+
+PRIORITY_QUEUES_STATIC := $(foreach k,$(TYPE_NOBOOL_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)PriorityQueues.c)
+$(PRIORITY_QUEUES_STATIC): drv/PriorityQueues.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(PRIORITY_QUEUES_STATIC)
+
+
+HEAPS_STATIC := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Heaps.c)
+$(HEAPS_STATIC): drv/Heaps.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(HEAPS_STATIC)
+
+
+SEMI_INDIRECT_HEAPS_STATIC := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)SemiIndirectHeaps.c)
+$(SEMI_INDIRECT_HEAPS_STATIC): drv/SemiIndirectHeaps.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(SEMI_INDIRECT_HEAPS_STATIC)
+
+
+INDIRECT_HEAPS_STATIC := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)IndirectHeaps.c)
+$(INDIRECT_HEAPS_STATIC): drv/IndirectHeaps.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(INDIRECT_HEAPS_STATIC)
+
+
+FUNCTIONS_STATIC := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)Functions.c))
+$(FUNCTIONS_STATIC): drv/Functions.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(FUNCTIONS_STATIC)
+
+
+MAPS_STATIC := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)Maps.c))
+$(MAPS_STATIC): drv/Maps.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(MAPS_STATIC)
+
+
+SORTED_MAPS_STATIC := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)SortedMaps.c))
+$(SORTED_MAPS_STATIC): drv/SortedMaps.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(SORTED_MAPS_STATIC)
+
+
+COMPARATORS_STATIC := $(foreach k,$(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)Comparators.c)
+$(COMPARATORS_STATIC): drv/Comparators.drv; ./gencsource.sh $< $@ >$@
+
+CSOURCES += $(COMPARATORS_STATIC)
+
+#
+# Fragmented stuff
+#
+
+BINIO_FRAGMENTS := $(foreach k,$(TYPE_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/io/$(k)BinIOFragment.h)
+$(BINIO_FRAGMENTS): drv/BinIOFragment.drv; ./gencsource.sh $< $@ >$@
+
+CFRAGMENTS += $(BINIO_FRAGMENTS)
+
+$(GEN_SRCDIR)/$(PKG_PATH)/io/BinIO.c: drv/BinIO.drv $(BINIO_FRAGMENTS)
+	./gencsource.sh drv/BinIO.drv $@ >$@
+
+CSOURCES += $(GEN_SRCDIR)/$(PKG_PATH)/io/BinIO.c
+
+
+TEXTIO_FRAGMENTS := $(foreach k,$(TYPE_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/io/$(k)TextIOFragment.h)
+$(TEXTIO_FRAGMENTS): drv/TextIOFragment.drv; ./gencsource.sh $< $@ >$@
+
+CFRAGMENTS += $(TEXTIO_FRAGMENTS)
+
+$(GEN_SRCDIR)/$(PKG_PATH)/io/TextIO.c: drv/TextIO.drv $(TEXTIO_FRAGMENTS)
+	./gencsource.sh drv/TextIO.drv $@ >$@
+
+CSOURCES += $(GEN_SRCDIR)/$(PKG_PATH)/io/TextIO.c
+
+#
+# Old sources, generated only with the old target
+#
+
+OPEN_DOUBLE_HASH_SETS := $(foreach k,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)OpenDoubleHashSet.c)
+$(OPEN_DOUBLE_HASH_SETS): drv/OpenDoubleHashSet.drv; ./gencsource.sh $< $@ >$@
+
+OLDCSOURCES += $(OPEN_DOUBLE_HASH_SETS)
+
+LINKED_OPEN_DOUBLE_HASH_SETS := $(foreach k,$(TYPE_NOBOOL), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)LinkedOpenDoubleHashSet.c)
+$(LINKED_OPEN_DOUBLE_HASH_SETS): drv/LinkedOpenDoubleHashSet.drv; ./gencsource.sh $< $@ >$@
+
+OLDCSOURCES += $(LINKED_OPEN_DOUBLE_HASH_SETS)
+
+OPEN_DOUBLE_CUSTOM_HASH_SETS := $(GEN_SRCDIR)/$(PKG_PATH)/objects/ObjectOpenCustomDoubleHashSet.c
+$(OPEN_DOUBLE_CUSTOM_HASH_SETS): drv/OpenCustomDoubleHashSet.drv; ./gencsource.sh $< $@ >$@
+
+OLDCSOURCES += $(OPEN_DOUBLE_CUSTOM_HASH_SETS)
+
+LINKED_OPEN_DOUBLE_CUSTOM_HASH_SETS := $(GEN_SRCDIR)/$(PKG_PATH)/objects/ObjectLinkedOpenCustomDoubleHashSet.c
+$(LINKED_OPEN_DOUBLE_CUSTOM_HASH_SETS): drv/LinkedOpenCustomDoubleHashSet.drv; ./gencsource.sh $< $@ >$@
+
+OLDCSOURCES += $(LINKED_OPEN_DOUBLE_CUSTOM_HASH_SETS)
+
+OPEN_DOUBLE_HASH_MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)OpenDoubleHashMap.c))
+$(OPEN_DOUBLE_HASH_MAPS): drv/OpenDoubleHashMap.drv; ./gencsource.sh $< $@ >$@
+
+OLDCSOURCES += $(OPEN_DOUBLE_HASH_MAPS)
+
+LINKED_OPEN_DOUBLE_HASH_MAPS := $(foreach k,$(TYPE_NOBOOL), $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)2$(v)LinkedOpenDoubleHashMap.c))
+$(LINKED_OPEN_DOUBLE_HASH_MAPS): drv/LinkedOpenDoubleHashMap.drv; ./gencsource.sh $< $@ >$@
+
+OLDCSOURCES += $(LINKED_OPEN_DOUBLE_HASH_MAPS)
+
+OPEN_CUSTOM_DOUBLE_HASH_MAPS := $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/objects/Object2$(v)OpenCustomDoubleHashMap.c)
+$(OPEN_CUSTOM_DOUBLE_HASH_MAPS): drv/OpenCustomDoubleHashMap.drv; ./gencsource.sh $< $@ >$@
+
+OLDCSOURCES += $(OPEN_CUSTOM_DOUBLE_HASH_MAPS)
+
+LINKED_OPEN_CUSTOM_DOUBLE_HASH_MAPS := $(foreach v,$(TYPE), $(GEN_SRCDIR)/$(PKG_PATH)/objects/Object2$(v)LinkedOpenCustomDoubleHashMap.c)
+$(LINKED_OPEN_CUSTOM_DOUBLE_HASH_MAPS): drv/LinkedOpenCustomDoubleHashMap.drv; ./gencsource.sh $< $@ >$@
+
+OLDCSOURCES += $(LINKED_OPEN_CUSTOM_DOUBLE_HASH_MAPS)
+
+INDIRECT_DOUBLE_PRIORITY_QUEUES := $(foreach k,$(TYPE_NOBOOL_NOOBJ), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)IndirectDoublePriorityQueue.c)
+$(INDIRECT_DOUBLE_PRIORITY_QUEUES): drv/IndirectDoublePriorityQueue.drv; ./gencsource.sh $< $@ >$@
+
+OLDCSOURCES += $(INDIRECT_DOUBLE_PRIORITY_QUEUES)
+
+HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUES := $(foreach k, $(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)HeapSesquiIndirectDoublePriorityQueue.c)
+$(HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUES): drv/HeapSesquiIndirectDoublePriorityQueue.drv; ./gencsource.sh $< $@ >$@
+
+OLDCSOURCES += $(HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUES)
+
+HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUES := $(foreach k, $(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)HeapIndirectDoublePriorityQueue.c)
+$(HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUES): drv/HeapIndirectDoublePriorityQueue.drv; ./gencsource.sh $< $@ >$@
+
+OLDCSOURCES += $(HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUES)
+
+ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUES := $(foreach k, $(TYPE_NOBOOL_NOREF), $(GEN_SRCDIR)/$(PKG_PATH)/$(PACKAGE_$(k))/$(k)ArrayIndirectDoublePriorityQueue.c)
+$(ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUES): drv/ArrayIndirectDoublePriorityQueue.drv; ./gencsource.sh $< $@ >$@
+
+OLDCSOURCES += $(ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUES)
+
+
+JSOURCES = $(CSOURCES:.c=.java) # The list of generated Java source files
+OLDJSOURCES = $(OLDCSOURCES:.c=.java) 
+
+
+SOURCES = \
+	$(SOURCEDIR)/Function.java \
+	$(SOURCEDIR)/Hash.java \
+	$(SOURCEDIR)/HashCommon.java \
+	$(SOURCEDIR)/BidirectionalIterator.java \
+	$(SOURCEDIR)/Stack.java \
+	$(SOURCEDIR)/BigList.java \
+	$(SOURCEDIR)/BigListIterator.java \
+	$(SOURCEDIR)/BigArrays.java \
+	$(SOURCEDIR)/PriorityQueue.java \
+	$(SOURCEDIR)/IndirectPriorityQueue.java \
+	$(SOURCEDIR)/Maps.java \
+	$(SOURCEDIR)/Arrays.java \
+	$(SOURCEDIR)/Swapper.java \
+	$(SOURCEDIR)/BigSwapper.java \
+	$(SOURCEDIR)/Size64.java \
+	$(SOURCEDIR)/PriorityQueues.java \
+	$(SOURCEDIR)/IndirectPriorityQueues.java \
+	$(SOURCEDIR)/AbstractPriorityQueue.java \
+	$(SOURCEDIR)/AbstractIndirectPriorityQueue.java \
+	$(SOURCEDIR)/AbstractStack.java \
+	$(SOURCEDIR)/io/FastByteArrayInputStream.java \
+	$(SOURCEDIR)/io/FastByteArrayOutputStream.java \
+	$(SOURCEDIR)/io/FastMultiByteArrayInputStream.java \
+	$(SOURCEDIR)/io/FastBufferedInputStream.java \
+	$(SOURCEDIR)/io/FastBufferedOutputStream.java \
+	$(SOURCEDIR)/io/InspectableFileCachedInputStream.java \
+	$(SOURCEDIR)/io/MeasurableInputStream.java \
+	$(SOURCEDIR)/io/MeasurableOutputStream.java \
+	$(SOURCEDIR)/io/MeasurableStream.java \
+	$(SOURCEDIR)/io/RepositionableStream.java # These are True Java Sources instead
+
+OLDSOURCES = \
+	$(SOURCEDIR)/IndirectDoublePriorityQueue.java \
+	$(SOURCEDIR)/IndirectDoublePriorityQueues.java \
+	$(SOURCEDIR)/AbstractIndirectDoublePriorityQueue.java
+
+
+# We pass each generated Java source through the preprocessor. TEST compiles in the test code,
+# whereas ASSERTS compiles in some assertions (which, of course, must be enabled in the JVM, e.g., with -ea).
+
+$(JSOURCES) $(OLDJSOURCES): %.java: %.c
+	gcc -w -I. -ftabstop=4 $(if $(TEST),-DTEST,) $(if $(ASSERTS),-DASSERTS_CODE,) -DASSERTS_VALUE=$(if $(ASSERTS),true,false) -E -C -P $< >$@
+
+
+clean: 
+	@find . -name \*.class -exec rm {} \;  
+	@find . -name \*.java~ -exec rm {} \;  
+	@find . -name \*.html~ -exec rm {} \;  
+	@rm -f $(GEN_SRCDIR)/$(PKG_PATH)/{booleans,bytes,shorts,chars,ints,longs,floats,doubles,objects}/*.java
+	@rm -f $(GEN_SRCDIR)/$(PKG_PATH)/*.{c,h,j} $(GEN_SRCDIR)/$(PKG_PATH)/*/*.{c,h,j}
+	@rm -fr $(DOCSDIR)/*
+
+
+sources: $(JSOURCES)
+
+oldsources: $(OLDJSOURCES)
+
+csources: $(CSOURCES)
+
+oldcsources: $(OLDCSOURCES)
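
The sources target above produces Java purely by macro expansion: gcc -E runs over each generated .c file with the definitions emitted by gencsource.sh. A hedged before/after sketch for the Int key instantiation (the driver line and the interface below are hypothetical, not actual fastutil sources):

    // Hypothetical driver fragment, before preprocessing:
    //
    //     public KEY_TYPE GET_KEY( final int index );
    //
    // With the Int key definitions from gencsource.sh (KEY_TYPE -> int,
    // GET_KEY -> getInt), gcc -E -C -P leaves plain Java behind:
    public interface IntGetterSketch { // illustrative name, not a fastutil type
        int getInt(int index);
    }
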
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..702e0d2
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,30 @@
+<project>
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>it.unimi.dsi</groupId>
+  <artifactId>fastutil</artifactId>
+  <packaging>jar</packaging>
+  <name>fastutil</name>
+  <version>6.5.3</version>
+  <description>fastutil extends the Java Collections Framework by providing type-specific maps, sets, lists and priority queues with a small memory footprint and fast access and insertion; it also provides big (64-bit) arrays, sets and lists, and fast, practical I/O classes for binary and text files.</description>
+  <url>http://fastutil.dsi.unimi.it/</url>
+  <licenses>
+    <license>
+      <name>Apache License, Version 2.0</name>
+      <url>http://www.apache.org/licenses/LICENSE-2.0.html</url>
+      <distribution>repo</distribution>
+    </license>
+  </licenses>
+  <scm>
+    <connection>scm:git://github.com/vigna/fastutil.git</connection>
+    <url>https://github.com/vigna/fastutil</url>
+  </scm>
+  <developers>
+    <developer>
+      <id>vigna</id>
+      <name>Sebastiano Vigna</name>
+      <email>vigna at dsi.unimi.it</email>
+    </developer>
+  </developers>
+  <dependencies>
+  </dependencies>
+</project>
diff --git a/src/it/unimi/dsi/fastutil/AbstractIndirectPriorityQueue.java b/src/it/unimi/dsi/fastutil/AbstractIndirectPriorityQueue.java
new file mode 100644
index 0000000..5787a9e
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/AbstractIndirectPriorityQueue.java
@@ -0,0 +1,41 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+/** An abstract class providing basic methods for implementing the {@link IndirectPriorityQueue} interface.
+ *
+ * <P>This class defines {@link #changed(int)}, {@link #allChanged()}, {@link #remove(int)}, {@link #contains(int)} and {@link #last()} as throwing an
+ * {@link UnsupportedOperationException}.
+ */
+
+public abstract class AbstractIndirectPriorityQueue<K> implements IndirectPriorityQueue<K> {
+
+	public int last() { throw new UnsupportedOperationException(); }
+
+	public void changed() { changed( first() ); }
+	
+	public void changed( int index ) { throw new UnsupportedOperationException(); }
+
+	public void allChanged() { throw new UnsupportedOperationException(); }
+
+	public boolean remove( int index ) { throw new UnsupportedOperationException(); }
+
+	public boolean contains( int index ) { throw new UnsupportedOperationException(); }
+
+	public boolean isEmpty() { return size() == 0; }
+
+}
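
The indirect queues order integer indices into a caller-supplied reference array rather than the elements themselves. A minimal usage sketch with the generated IntHeapIndirectPriorityQueue, assuming its constructor that takes just the reference array:

    // IndirectQueueSketch.java -- illustration only
    import it.unimi.dsi.fastutil.ints.IntHeapIndirectPriorityQueue;

    public final class IndirectQueueSketch {
        public static void main(String[] args) {
            int[] ref = { 30, 10, 20 }; // the reference array holding the actual keys
            IntHeapIndirectPriorityQueue q = new IntHeapIndirectPriorityQueue(ref);
            q.enqueue(0);
            q.enqueue(1);
            q.enqueue(2);
            System.out.println(q.first());   // 1, the index of the smallest key (10)
            ref[1] = 99;                     // mutate the reference array...
            q.changed(1);                    // ...and notify the queue for that index
            System.out.println(q.dequeue()); // 2, the index of 20, now the smallest
        }
    }
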
diff --git a/src/it/unimi/dsi/fastutil/AbstractPriorityQueue.java b/src/it/unimi/dsi/fastutil/AbstractPriorityQueue.java
new file mode 100644
index 0000000..0b2aa45
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/AbstractPriorityQueue.java
@@ -0,0 +1,35 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+import it.unimi.dsi.fastutil.PriorityQueue;
+
+/**  An abstract class providing basic methods for implementing the {@link PriorityQueue} interface. 
+ *
+ * <P>This class defines {@link #changed()} and {@link #last()} as throwing an
+ * {@link UnsupportedOperationException}.
+ */
+
+public abstract class AbstractPriorityQueue<K> implements PriorityQueue<K> {
+
+	public void changed() { throw new UnsupportedOperationException(); }
+
+	public K last() { throw new UnsupportedOperationException(); }
+
+	public boolean isEmpty() { return size() == 0; }
+
+}
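
For direct queues, the generated type-specific subclasses follow the method-name macros from gencsource.sh (FIRST becomes firstInt and DEQUEUE becomes dequeueInt for the Int key). A minimal usage sketch with IntHeapPriorityQueue:

    // DirectQueueSketch.java -- illustration only
    import it.unimi.dsi.fastutil.ints.IntHeapPriorityQueue;

    public final class DirectQueueSketch {
        public static void main(String[] args) {
            IntHeapPriorityQueue q = new IntHeapPriorityQueue(); // natural int order
            q.enqueue(3);
            q.enqueue(1);
            q.enqueue(2);
            System.out.println(q.firstInt());   // 1, without removing it
            System.out.println(q.dequeueInt()); // 1
            System.out.println(q.size());       // 2
        }
    }
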
diff --git a/src/it/unimi/dsi/fastutil/AbstractStack.java b/src/it/unimi/dsi/fastutil/AbstractStack.java
new file mode 100644
index 0000000..fffb605
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/AbstractStack.java
@@ -0,0 +1,41 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+/** An abstract class providing basic methods for implementing the {@link Stack} interface.
+ *
+ * <P>This class just defines {@link Stack#top()} as {@link Stack#peek(int) peek(0)}, and
+ * {@link Stack#peek(int)} as throwing an {@link UnsupportedOperationException}.
+ *
+ * Subclasses of this class may choose to implement just {@link Stack#push(Object)},
+ * {@link Stack#pop()} and {@link Stack#isEmpty()}, or (but this is not
+ * required) go farther and implement {@link Stack#top()}, or even {@link
+ * Stack#peek(int)}.
+ */
+
+public abstract class AbstractStack<K> implements Stack<K> {
+
+	public K top() {
+		return peek( 0 );
+	}
+
+	public K peek( int i ) {
+		throw new UnsupportedOperationException();
+	}
+
+}
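
As the class comment notes, a subclass only has to supply push, pop and isEmpty. A minimal sketch backed by a java.util.ArrayList (the class name is illustrative, not part of fastutil); it also overrides peek so that the inherited top(), defined as peek(0), works:

    // ListBackedStackSketch.java -- illustration only
    import it.unimi.dsi.fastutil.AbstractStack;
    import java.util.ArrayList;

    public final class ListBackedStackSketch<K> extends AbstractStack<K> {
        private final ArrayList<K> list = new ArrayList<K>();

        public void push(K o) { list.add(o); }

        public K pop() { return list.remove(list.size() - 1); }

        public boolean isEmpty() { return list.isEmpty(); }

        // peek(i) returns the i-th element from the top without removing it
        public K peek(int i) { return list.get(list.size() - 1 - i); }
    }
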
diff --git a/src/it/unimi/dsi/fastutil/Arrays.java b/src/it/unimi/dsi/fastutil/Arrays.java
new file mode 100644
index 0000000..c917650
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/Arrays.java
@@ -0,0 +1,330 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+import java.util.ArrayList;
+
+import it.unimi.dsi.fastutil.ints.IntComparator;
+
+/** A class providing static methods and objects that do useful things with arrays.
+ *
+ * <p>In addition to commodity methods, this class contains {@link Swapper}-based implementations
+ * of {@linkplain #quickSort(int, int, IntComparator, Swapper) quicksort} and of
+ * a stable, in-place {@linkplain #mergeSort(int, int, IntComparator, Swapper) mergesort}. These
+ * generic sorting methods can be used to sort any kind of list, but they find their natural
+ * usage, for instance, in sorting several parallel arrays simultaneously.
+ *
+ * @see Arrays
+ */
+
+public class Arrays {
+	
+	private Arrays() {}
+
+	/** This is a safe value used by {@link ArrayList} (as of Java 7) to avoid
+	 *  throwing {@link OutOfMemoryError} on some JVMs. We adopt the same value. */
+    public static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;
+
+    /** Ensures that a range given by its first (inclusive) and last (exclusive) elements fits an array of given length.
+	 *
+	 * <P>This method may be used whenever an array range check is needed.
+	 *
+	 * @param arrayLength an array length.
+	 * @param from a start index (inclusive).
+	 * @param to an end index (exclusive).
+	 * @throws IllegalArgumentException if <code>from</code> is greater than <code>to</code>.
+	 * @throws ArrayIndexOutOfBoundsException if <code>from</code> or <code>to</code> are greater than <code>arrayLength</code> or negative.
+	 */
+	public static void ensureFromTo( final int arrayLength, final int from, final int to ) {
+		if ( from < 0 ) throw new ArrayIndexOutOfBoundsException( "Start index (" + from + ") is negative" );
+		if ( from > to ) throw new IllegalArgumentException( "Start index (" + from + ") is greater than end index (" + to + ")" );
+		if ( to > arrayLength ) throw new ArrayIndexOutOfBoundsException( "End index (" + to + ") is greater than array length (" + arrayLength + ")" );
+	}
+
+	/** Ensures that a range given by an offset and a length fits an array of given length.
+	 *
+	 * <P>This method may be used whenever an array range check is needed.
+	 *
+	 * @param arrayLength an array length.
+	 * @param offset a start index for the fragment
+	 * @param length a length (the number of elements in the fragment).
+	 * @throws IllegalArgumentException if <code>length</code> is negative.
+	 * @throws ArrayIndexOutOfBoundsException if <code>offset</code> is negative or <code>offset</code>+<code>length</code> is greater than <code>arrayLength</code>.
+	 */
+	public static void ensureOffsetLength( final int arrayLength, final int offset, final int length ) {
+		if ( offset < 0 ) throw new ArrayIndexOutOfBoundsException( "Offset (" + offset + ") is negative" );
+		if ( length < 0 ) throw new IllegalArgumentException( "Length (" + length + ") is negative" );
+		if ( offset + length > arrayLength ) throw new ArrayIndexOutOfBoundsException( "Last index (" + ( offset + length ) + ") is greater than array length (" + arrayLength + ")" );
+	}
+
+	private static final int SMALL = 7;
+	private static final int MEDIUM = 40;
+
+	/**
+	 * Transforms two consecutive sorted ranges into a single sorted range. The initial ranges are
+	 * <code>[first..middle)</code> and <code>[middle..last)</code>, and the resulting range is
+	 * <code>[first..last)</code>. Elements in the first input range will precede equal elements in
+	 * the second.
+	 */
+	private static void inPlaceMerge( final int from, int mid, final int to, final IntComparator comp, final Swapper swapper ) {
+		if ( from >= mid || mid >= to ) return;
+		if ( to - from == 2 ) {
+			if ( comp.compare( mid, from ) < 0 ) swapper.swap( from, mid );
+			return;
+		}
+
+		int firstCut;
+		int secondCut;
+
+		if ( mid - from > to - mid ) {
+			firstCut = from + ( mid - from ) / 2;
+			secondCut = lowerBound( mid, to, firstCut, comp );
+		}
+		else {
+			secondCut = mid + ( to - mid ) / 2;
+			firstCut = upperBound( from, mid, secondCut, comp );
+		}
+
+		int first2 = firstCut;
+		int middle2 = mid;
+		int last2 = secondCut;
+		if ( middle2 != first2 && middle2 != last2 ) {
+			int first1 = first2;
+			int last1 = middle2;
+			while ( first1 < --last1 )
+				swapper.swap( first1++, last1 );
+			first1 = middle2;
+			last1 = last2;
+			while ( first1 < --last1 )
+				swapper.swap( first1++, last1 );
+			first1 = first2;
+			last1 = last2;
+			while ( first1 < --last1 )
+				swapper.swap( first1++, last1 );
+		}
+
+		mid = firstCut + ( secondCut - mid );
+		inPlaceMerge( from, firstCut, mid, comp, swapper );
+		inPlaceMerge( mid, secondCut, to, comp, swapper );
+	}
+
+	/**
+	 * Performs a binary search on an already-sorted range: finds the first position where an
+	 * element can be inserted without violating the ordering. Sorting is by a user-supplied
+	 * comparison function.
+	 * 
+	 * @param from the index of the first element (inclusive) to be included in the binary search.
+	 * @param to the index of the last element (exclusive) to be included in the binary search.
+	 * @param pos the position of the element to be searched for.
+	 * @param comp the comparison function.
+	 * @return the largest index i such that, for every j in the range <code>[from..i)</code>,
+	 * <code>comp.compare(j, pos)</code> is negative.
+	 */
+	private static int lowerBound( int from, final int to, final int pos, final IntComparator comp ) {
+		// if (comp==null) throw new NullPointerException();
+		int len = to - from;
+		while ( len > 0 ) {
+			int half = len / 2;
+			int middle = from + half;
+			if ( comp.compare( middle, pos ) < 0 ) {
+				from = middle + 1;
+				len -= half + 1;
+			}
+			else {
+				len = half;
+			}
+		}
+		return from;
+	}
+
+
+	/**
+	 * Performs a binary search on an already sorted range: finds the last position where an element
+	 * can be inserted without violating the ordering. Sorting is by a user-supplied comparison
+	 * function.
+	 * 
+	 * @param from the index of the first element (inclusive) to be included in the binary search.
+	 * @param mid the index of the last element (exclusive) to be included in the binary search.
+	 * @param pos the position of the element to be searched for.
+	 * @param comp the comparison function.
+	 * @return the largest index i such that, for every j in the range <code>[from..i)</code>,
+	 * <code>comp.compare(pos, j)</code> is non-negative.
+	 */
+	private static int upperBound( int from, final int mid, final int pos, final IntComparator comp ) {
+		// if (comp==null) throw new NullPointerException();
+		int len = mid - from;
+		while ( len > 0 ) {
+			int half = len / 2;
+			int middle = from + half;
+			if ( comp.compare( pos, middle ) < 0 ) {
+				len = half;
+			}
+			else {
+				from = middle + 1;
+				len -= half + 1;
+			}
+		}
+		return from;
+	}
+
+	/**
+	 * Returns the index of the median of the three indexed elements.
+	 */
+	private static int med3( final int a, final int b, final int c, final IntComparator comp ) {
+		int ab = comp.compare( a, b );
+		int ac = comp.compare( a, c );
+		int bc = comp.compare( b, c );
+		return ( ab < 0 ?
+				( bc < 0 ? b : ac < 0 ? c : a ) :
+				( bc > 0 ? b : ac > 0 ? c : a ) );
+	}
+
+	/** Sorts the specified range of elements using the specified swapper and according to the order induced by the specified
+	 * comparator using mergesort.
+	 * 
+	 * <p>This sort is guaranteed to be <i>stable</i>: equal elements will not be reordered as a result
+	 * of the sort. The sorting algorithm is an in-place mergesort that is significantly slower than a 
+	 * standard mergesort, as its running time is <i>O</i>(<var>n</var> (log <var>n</var>)<sup>2</sup>), but it does not allocate additional memory; as a result, it can be
+	 * used as a generic sorting algorithm.
+	 * 
+	 * @param from the index of the first element (inclusive) to be sorted.
+	 * @param to the index of the last element (exclusive) to be sorted.
+	 * @param c the comparator to determine the order of the generic data (arguments are positions).
+	 * @param swapper an object that knows how to swap the elements at any two positions.
+	 */
+	public static void mergeSort( final int from, final int to, final IntComparator c, final Swapper swapper ) {
+		/*
+		 * We retain the same method signature as quickSort. Given only a comparator and swapper we
+		 * do not know how to copy and move elements from/to temporary arrays. Hence, in contrast to
+		 * the JDK mergesorts this is an "in-place" mergesort, i.e. does not allocate any temporary
+		 * arrays. A non-inplace mergesort would perhaps be faster in most cases, but would require
+		 * non-intuitive delegate objects...
+		 */
+		final int length = to - from;
+
+		// Insertion sort on smallest arrays
+		if ( length < SMALL ) {
+			for ( int i = from; i < to; i++ ) {
+				for ( int j = i; j > from && ( c.compare( j - 1, j ) > 0 ); j-- ) {
+					swapper.swap( j, j - 1 );
+				}
+			}
+			return;
+		}
+
+		// Recursively sort halves
+		int mid = ( from + to ) >>> 1;
+		mergeSort( from, mid, c, swapper );
+		mergeSort( mid, to, c, swapper );
+
+		// If list is already sorted, nothing left to do. This is an
+		// optimization that results in faster sorts for nearly ordered lists.
+		if ( c.compare( mid - 1, mid ) <= 0 ) return;
+
+		// Merge sorted halves
+		inPlaceMerge( from, mid, to, c, swapper );
+	}
+
+	/** Sorts the specified range of elements using the specified swapper and according to the order induced by the specified
+	 * comparator using quicksort. 
+	 * 
+	 * <p>The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas
+	 * McIlroy, “Engineering a Sort Function”, <i>Software: Practice and Experience</i>, 23(11), pages
+	 * 1249−1265, 1993.
+	 * 
+	 * @param from the index of the first element (inclusive) to be sorted.
+	 * @param to the index of the last element (exclusive) to be sorted.
+	 * @param comp the comparator to determine the order of the generic data.
+	 * @param swapper an object that knows how to swap the elements at any two positions.
+	 * 
+	 */
+	public static void quickSort( final int from, final int to, final IntComparator comp, final Swapper swapper ) {
+		final int len = to - from;
+		// Insertion sort on smallest arrays
+		if ( len < SMALL ) {
+			for ( int i = from; i < to; i++ )
+				for ( int j = i; j > from && ( comp.compare( j - 1, j ) > 0 ); j-- ) {
+					swapper.swap( j, j - 1 );
+				}
+			return;
+		}
+
+		// Choose a partition element, v
+		int m = from + len / 2; // Small arrays, middle element
+		if ( len > SMALL ) {
+			int l = from;
+			int n = to - 1;
+			if ( len > MEDIUM ) { // Big arrays, pseudomedian of 9
+				int s = len / 8;
+				l = med3( l, l + s, l + 2 * s, comp );
+				m = med3( m - s, m, m + s, comp );
+				n = med3( n - 2 * s, n - s, n, comp );
+			}
+			m = med3( l, m, n, comp ); // Mid-size, med of 3
+		}
+		// int v = x[m];
+
+		int a = from;
+		int b = a;
+		int c = to - 1;
+		// Establish Invariant: v* (<v)* (>v)* v*
+		int d = c;
+		while ( true ) {
+			int comparison;
+			while ( b <= c && ( ( comparison = comp.compare( b, m ) ) <= 0 ) ) {
+				if ( comparison == 0 ) {
+					if ( a == m ) m = b; // moving target; DELTA to JDK !!!
+					else if ( b == m ) m = a; // moving target; DELTA to JDK !!!
+					swapper.swap( a++, b );
+				}
+				b++;
+			}
+			while ( c >= b && ( ( comparison = comp.compare( c, m ) ) >= 0 ) ) {
+				if ( comparison == 0 ) {
+					if ( c == m ) m = d; // moving target; DELTA to JDK !!!
+					else if ( d == m ) m = c; // moving target; DELTA to JDK !!!
+					swapper.swap( c, d-- );
+				}
+				c--;
+			}
+			if ( b > c ) break;
+			if ( b == m ) m = d; // moving target; DELTA to JDK !!!
+			else if ( c == m ) m = c; // moving target; DELTA to JDK !!!
+			swapper.swap( b++, c-- );
+		}
+
+		// Swap partition elements back to middle
+		int s;
+		int n = to;
+		s = Math.min( a - from, b - a );
+		vecSwap( swapper, from, b - s, s );
+		s = Math.min( d - c, n - d - 1 );
+		vecSwap( swapper, b, n - s, s );
+
+		// Recursively sort non-partition-elements
+		if ( ( s = b - a ) > 1 ) quickSort( from, from + s, comp, swapper );
+		if ( ( s = d - c ) > 1 ) quickSort( n - s, n, comp, swapper );
+	}
+
+
+	/**
+	 * Swaps x[a .. (a+n-1)] with x[b .. (b+n-1)].
+	 */
+	private static void vecSwap( final Swapper swapper, int from, int l, final int s ) {
+		for ( int i = 0; i < s; i++, from++, l++ ) swapper.swap( from, l );
+	}
+}
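
The Swapper-based sorts above take a comparator and a swapper that both receive positions rather than values, which makes them handy for keeping several parallel arrays aligned while sorting on one of them. A minimal sketch of that use case (class and variable names are invented for illustration):

    import it.unimi.dsi.fastutil.Arrays;
    import it.unimi.dsi.fastutil.Swapper;
    import it.unimi.dsi.fastutil.ints.IntComparator;

    public class ParallelSortSketch {
        public static void main( String[] args ) {
            final int[] key = { 4, 2, 9, 1 };
            final String[] value = { "four", "two", "nine", "one" };

            Arrays.quickSort( 0, key.length,
                new IntComparator() {
                    // Arguments are positions, not values: order by the key array only.
                    public int compare( int a, int b ) { return key[ a ] < key[ b ] ? -1 : key[ a ] == key[ b ] ? 0 : 1; }
                    // IntComparator also extends Comparator<Integer>, hence the boxed variant.
                    public int compare( Integer a, Integer b ) { return compare( a.intValue(), b.intValue() ); }
                },
                new Swapper() {
                    // Swap both arrays, so corresponding entries stay together.
                    public void swap( int a, int b ) {
                        final int t = key[ a ]; key[ a ] = key[ b ]; key[ b ] = t;
                        final String s = value[ a ]; value[ a ] = value[ b ]; value[ b ] = s;
                    }
                } );

            // key is now { 1, 2, 4, 9 } and value is { "one", "two", "four", "nine" }.
        }
    }

The same comparator/swapper pair works unchanged with mergeSort() when stability matters.
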
diff --git a/src/it/unimi/dsi/fastutil/BidirectionalIterator.java b/src/it/unimi/dsi/fastutil/BidirectionalIterator.java
new file mode 100644
index 0000000..cce5ea6
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/BidirectionalIterator.java
@@ -0,0 +1,55 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+import java.util.Iterator;
+import java.util.ListIterator;
+
+/** A bidirectional {@link Iterator}.
+ *
+ * <P>This kind of iterator is essentially a {@link ListIterator} that
+ * does not support {@link ListIterator#previousIndex()} and {@link
+ * ListIterator#nextIndex()}. It is useful for those maps that can easily
+ * provide bidirectional iteration, but provide no index.
+ *
+ * <P>Note that iterators returned by <code>fastutil</code> classes are more
+ * specific, and support skipping. This class serves the purpose of organising
+ * in a cleaner way the relationships between various iterators.
+ *
+ * @see Iterator
+ * @see ListIterator
+ */
+
+public interface BidirectionalIterator<K> extends Iterator<K> {
+
+	/** Returns the previous element from the collection.
+	 *
+	 * @return the previous element from the collection.
+	 * @see java.util.ListIterator#previous()
+	 */
+
+	K previous();
+
+	/** Returns whether there is a previous element.
+	 *
+	 * @return whether there is a previous element.
+	 * @see java.util.ListIterator#hasPrevious()
+	 */
+
+	boolean hasPrevious();
+}
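
A bare-bones illustration of the contract, implemented over a plain array (the class name is invented for the example):

    import java.util.NoSuchElementException;

    import it.unimi.dsi.fastutil.BidirectionalIterator;

    public class ArrayBidirectionalIterator<K> implements BidirectionalIterator<K> {
        private final K[] a;
        private int pos; // position of the element that next() would return

        public ArrayBidirectionalIterator( final K[] a ) { this.a = a; }

        public boolean hasNext() { return pos < a.length; }
        public boolean hasPrevious() { return pos > 0; }
        public K next() { if ( ! hasNext() ) throw new NoSuchElementException(); return a[ pos++ ]; }
        public K previous() { if ( ! hasPrevious() ) throw new NoSuchElementException(); return a[ --pos ]; }
        public void remove() { throw new UnsupportedOperationException(); }
    }

Alternating next() and previous() returns the same element, exactly as a ListIterator would, but no index information is exposed.
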
diff --git a/src/it/unimi/dsi/fastutil/BigArrays.java b/src/it/unimi/dsi/fastutil/BigArrays.java
new file mode 100644
index 0000000..a772ae9
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/BigArrays.java
@@ -0,0 +1,489 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2010-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ *
+ * For the sorting code:
+ *
+ * Copyright (C) 1999 CERN - European Organization for Nuclear Research.
+ *
+ *   Permission to use, copy, modify, distribute and sell this software and
+ *   its documentation for any purpose is hereby granted without fee,
+ *   provided that the above copyright notice appear in all copies and that
+ *   both that copyright notice and this permission notice appear in
+ *   supporting documentation. CERN makes no representations about the
+ *   suitability of this software for any purpose. It is provided "as is"
+ *   without expressed or implied warranty. 
+ */
+
+
+import it.unimi.dsi.fastutil.ints.IntBigArrayBigList;
+import it.unimi.dsi.fastutil.ints.IntBigArrays;
+import it.unimi.dsi.fastutil.longs.LongComparator;
+
+/** A class providing static methods and objects that do useful things with big arrays.
+ * 
+ * <h2>Introducing big arrays</h2>
+ * 
+ * <p>A <em>big array</em> is an array-of-arrays representation of an array. The length of a big array
+ * is bounded by {@link Long#MAX_VALUE} rather than {@link Integer#MAX_VALUE}. The type of a big array
+ * is that of an array-of-arrays, so a big array of integers is of type <code>int[][]</code>.
+ * 
+ * <p>If <code>a</code> is a big array, <code>a[0]</code>, <code>a[1]</code>, … are called
+ * the <em>segments</em> of the big array. All segments, except possibly for the last one, are of length
+ * {@link #SEGMENT_SIZE}. Given an index <code>i</code> into a big array, there is an associated
+ * <em>{@linkplain #segment(long) segment}</em> and an associated <em>{@linkplain #displacement(long) displacement}</em>
+ * into that segment. Access to single members happens by means of accessors defined in the type-specific
+ * versions (see, e.g., {@link IntBigArrays#get(int[][], long)} and {@link IntBigArrays#set(int[][], long, int)}), 
+ * but you can also use the methods {@link #segment(long)}/{@link #displacement(long)} to access entries manually.
+ * 
+ * <h2>Scanning big arrays</h2>
+ * 
+ * <p>You can scan a big array using the following idiomatic form:
+ * <pre>
+ *   for( int s = 0; s < a.length; s++ ) {
+ *      final int[] t = a[ s ];
+ *      final int l = t.length;
+ *      for( int d = 0; d < l; d++ ) { do something with t[ d ] }
+ *   }
+ * </pre>
+ * or using the (simpler and usually faster) reversed version:
+ * <pre>
+ *   for( int s = a.length; s-- != 0; ) {
+ *      final int[] t = a[ s ];  
+ *      for( int d = t.length; d-- != 0; ) { do something with t[ d ] }
+ *   }
+ * </pre>
+ * <p>Inside the inner loop, the original index in <code>a</code> can be retrieved using {@link #index(int, int) index(segment, displacement)}.
+ * Do <em>not</em> use an additional variable to keep track of the value of the original index, as
+ * computing it on the fly is significantly faster. For instance, to initialise the <var>i</var>-th element of a big array of
+ * long integers to the value <var>i</var> you should use
+ * <pre>
+ *   for( int s = a.length; s-- != 0; ) {
+ *      final long[] t = a[ s ];  
+ *      for( int d = t.length; d-- != 0; ) t[ d ] = index( s, d );
+ *   }
+ * </pre>
+ *  
+ * <p>Note that caching the current segment in a local variable is essential in making these loops essentially as fast as those scanning standard arrays (as iterations
+ * of the outer loop happen very rarely). Using loops of this kind is much faster than using a standard
+ * loop and accessors.
+ * 
+ * <p>In some situations, you might want to iterate over a part of a big array having an offset and a length. In this case, the
+ * idiomatic loops are as follows:
+ * <pre>
+ *   for( int s = segment( offset ); s < segment( offset + length + SEGMENT_MASK ); s++ ) {
+ *      final int[] t = a[ s ];
+ *      final int l = (int)Math.min( t.length, offset + length - start( s ) );
+ *      for( int d = (int)Math.max( 0, offset - start( s ) ); d < l; d++ ) { do something with t[ d ] }
+ *   }
+ * </pre>
+ * or, in a reversed form,
+ * <pre>
+ *   for( int s = segment( offset + length + SEGMENT_MASK ); s-- != segment( offset ); ) {
+ *      final int[] t = a[ s ];
+ *      final int b = (int)Math.max( 0, offset - start( s ) );
+ *      for( int d = (int)Math.min( t.length, offset + length - start( s ) ); d-- != b ; ) { do something with t[ d ] }
+ *   }
+ * </pre>
+ * 
+ * <h2>Literal big arrays</h2>
+ * 
+ * <p>A literal big array can be easily created by using the suitable type-specific <code>wrap()</code> method
+ * (e.g., {@link IntBigArrays#wrap(int[])}) around a literal standard array. Alternatively, for very small
+ * arrays you can just declare a literal array-of-array (e.g., <code>new int[][] { { 1, 2 } }</code>). Be warned,
+ * however, that this can lead to creating illegal big arrays if for some reason (e.g., stress testing) {@link #SEGMENT_SIZE}
+ * is set to a value smaller than the inner array length. 
+ * 
+ * <h2>Big alternatives</h2>
+ * 
+ * <p>If you find this kind of “bare hands” approach to big arrays insufficiently object-oriented, please use
+ * big lists based on big arrays (e.g., {@link IntBigArrayBigList}). Big arrays follow the Java tradition of 
+ * considering arrays as a “legal alien”—something in-between an object and a primitive type. This
+ * approach lacks the consistency of a full object-oriented approach, but provides some significant performance gains.
+ *
+ * <h2>Additional methods</h2>
+ * 
+ * <p>In addition to commodity methods, this class contains {@link BigSwapper}-based implementations
+ * of {@linkplain #quickSort(long, long, LongComparator, BigSwapper) quicksort} and of
+ * a stable, in-place {@linkplain #mergeSort(long, long, LongComparator, BigSwapper) mergesort}. These
+ * generic sorting methods can be used to sort any kind of list, but they find their natural
+ * usage, for instance, in sorting several parallel big arrays whose elements must be kept aligned.
+ *
+ * @see it.unimi.dsi.fastutil.Arrays
+ */
+
+public class BigArrays {
+	/** The shift used to compute the segment associated with an index (equivalently, the logarithm of the segment size). */
+	public final static int SEGMENT_SHIFT = 27;
+	/** The current size of a segment (2<sup>27</sup>) is the largest size that makes
+	 * the physical memory allocation for a single segment strictly smaller
+	 * than 2<sup>31</sup> bytes. */
+	public final static int SEGMENT_SIZE = 1 << SEGMENT_SHIFT;
+	/** The mask used to compute the displacement associated to an index. */
+	public final static int SEGMENT_MASK = SEGMENT_SIZE - 1;
+	
+	protected BigArrays() {}
+	
+	/** Computes the segment associated with a given index.
+	 * 
+	 * @param index an index into a big array.
+	 * @return the associated segment.
+	 */
+	public static int segment( final long index ) {
+		return (int)( index >>> SEGMENT_SHIFT );
+	}
+	
+	/** Computes the displacement associated with a given index.
+	 * 
+	 * @param index an index into a big array.
+	 * @return the associated displacement (in the associated {@linkplain #segment(long) segment}).
+	 */
+	public static int displacement( final long index ) {
+		return (int)( index & SEGMENT_MASK );
+	}
+	
+	/** Computes the starting index of a given segment.
+	 * 
+	 * @param segment the segment of a big array.
+	 * @return the starting index of the segment.
+	 */
+	public static long start( final int segment ) {
+		return (long)segment << SEGMENT_SHIFT;
+	}
+	
+	/** Computes the index associated with given segment and displacement.
+	 * 
+	 * @param segment the segment of a big array.
+	 * @param displacement the displacement into the segment.
+	 * @return the associated index: that is, {@link #segment(long) segment(index(segment, displacement)) == segment} and
+	 * {@link #displacement(long) displacement(index(segment, displacement)) == displacement}.
+	 */
+	public static long index( final int segment, final int displacement ) {
+		return start( segment ) + displacement;
+	}
+	
+	/** Ensures that a range given by its first (inclusive) and last (exclusive) elements fits a big array of given length.
+	 *
+	 * <P>This method may be used whenever a big array range check is needed.
+	 *
+	 * @param bigArrayLength a big-array length.
+	 * @param from a start index (inclusive).
+	 * @param to an end index (exclusive).
+	 * @throws IllegalArgumentException if <code>from</code> is greater than <code>to</code>.
+	 * @throws ArrayIndexOutOfBoundsException if <code>from</code> or <code>to</code> are greater than <code>bigArrayLength</code> or negative.
+	 */
+	public static void ensureFromTo( final long bigArrayLength, final long from, final long to ) {
+		if ( from < 0 ) throw new ArrayIndexOutOfBoundsException( "Start index (" + from + ") is negative" );
+		if ( from > to ) throw new IllegalArgumentException( "Start index (" + from + ") is greater than end index (" + to + ")" );
+		if ( to > bigArrayLength ) throw new ArrayIndexOutOfBoundsException( "End index (" + to + ") is greater than big-array length (" + bigArrayLength + ")" );
+	}
+
+	/** Ensures that a range given by an offset and a length fits a big array of given length.
+	 *
+	 * <P>This method may be used whenever a big array range check is needed.
+	 *
+	 * @param bigArrayLength a big-array length.
+	 * @param offset a start index for the fragment.
+	 * @param length a length (the number of elements in the fragment).
+	 * @throws IllegalArgumentException if <code>length</code> is negative.
+	 * @throws ArrayIndexOutOfBoundsException if <code>offset</code> is negative or <code>offset</code>+<code>length</code> is greater than <code>bigArrayLength</code>.
+	 */
+	public static void ensureOffsetLength( final long bigArrayLength, final long offset, final long length ) {
+		if ( offset < 0 ) throw new ArrayIndexOutOfBoundsException( "Offset (" + offset + ") is negative" );
+		if ( length < 0 ) throw new IllegalArgumentException( "Length (" + length + ") is negative" );
+		if ( offset + length > bigArrayLength ) throw new ArrayIndexOutOfBoundsException( "Last index (" + ( offset + length ) + ") is greater than big-array length (" + bigArrayLength + ")" );
+	}
+
+	
+	private static final int SMALL = 7;
+	private static final int MEDIUM = 40;
+
+	/**
+	 * Transforms two consecutive sorted ranges into a single sorted range. The initial ranges are
+	 * <code>[first, middle)</code> and <code>[middle, last)</code>, and the resulting range is
+	 * <code>[first, last)</code>. Elements in the first input range will precede equal elements in
+	 * the second.
+	 */
+	private static void inPlaceMerge( final long from, long mid, final long to, final LongComparator comp, final BigSwapper swapper ) {
+		if ( from >= mid || mid >= to ) return;
+		if ( to - from == 2 ) {
+			if ( comp.compare( mid, from ) < 0 ) {
+				swapper.swap( from, mid );
+			}
+			return;
+		}
+		long firstCut;
+		long secondCut;
+		if ( mid - from > to - mid ) {
+			firstCut = from + ( mid - from ) / 2;
+			secondCut = lowerBound( mid, to, firstCut, comp );
+		}
+		else {
+			secondCut = mid + ( to - mid ) / 2;
+			firstCut = upperBound( from, mid, secondCut, comp );
+		}
+
+		long first2 = firstCut;
+		long middle2 = mid;
+		long last2 = secondCut;
+		if ( middle2 != first2 && middle2 != last2 ) {
+			long first1 = first2;
+			long last1 = middle2;
+			while ( first1 < --last1 )
+				swapper.swap( first1++, last1 );
+			first1 = middle2;
+			last1 = last2;
+			while ( first1 < --last1 )
+				swapper.swap( first1++, last1 );
+			first1 = first2;
+			last1 = last2;
+			while ( first1 < --last1 )
+				swapper.swap( first1++, last1 );
+		}
+
+		mid = firstCut + ( secondCut - mid );
+		inPlaceMerge( from, firstCut, mid, comp, swapper );
+		inPlaceMerge( mid, secondCut, to, comp, swapper );
+	}
+
+	/**
+	 * Performs a binary search on an already sorted range: finds the first position where an
+	 * element can be inserted without violating the ordering. Sorting is by a user-supplied
+	 * comparison function.
+	 * 
+	 * @param mid Beginning of the range.
+	 * @param to One past the end of the range.
+	 * @param firstCut Element to be searched for.
+	 * @param comp Comparison function.
+	 * @return the largest index i such that, for every j in the range <code>[mid, i)</code>,
+	 * <code>comp.compare(j, firstCut)</code> is negative.
+	 */
+	private static long lowerBound( long mid, final long to, final long firstCut, final LongComparator comp ) {
+		long len = to - mid;
+		while ( len > 0 ) {
+			long half = len / 2;
+			long middle = mid + half;
+			if ( comp.compare( middle, firstCut ) < 0 ) {
+				mid = middle + 1;
+				len -= half + 1;
+			}
+			else {
+				len = half;
+			}
+		}
+		return mid;
+	}
+
+	/** Returns the index of the median of three elements. */
+	private static long med3( final long a, final long b, final long c, final LongComparator comp ) {
+		final int ab = comp.compare( a, b );
+		final int ac = comp.compare( a, c );
+		final int bc = comp.compare( b, c );
+		return ( ab < 0 ?
+				( bc < 0 ? b : ac < 0 ? c : a ) :
+				( bc > 0 ? b : ac > 0 ? c : a ) );
+	}
+
+	/** Sorts the specified range of elements using the specified big swapper and according to the order induced by the specified
+	 * comparator using mergesort.
+	 * 
+	 * <p>This sort is guaranteed to be <i>stable</i>: equal elements will not be reordered as a result
+	 * of the sort. The sorting algorithm is an in-place mergesort that is significantly slower than a 
+	 * standard mergesort, as its running time is <i>O</i>(<var>n</var> (log <var>n</var>)<sup>2</sup>), but it does not allocate additional memory; as a result, it can be
+	 * used as a generic sorting algorithm.
+	 * 
+	 * @param from the index of the first element (inclusive) to be sorted.
+	 * @param to the index of the last element (exclusive) to be sorted.
+	 * @param comp the comparator to determine the order of the generic data (arguments are positions).
+	 * @param swapper an object that knows how to swap the elements at any two positions.
+	 */
+	public static void mergeSort( final long from, final long to, final LongComparator comp, final BigSwapper swapper ) {
+		final long length = to - from;
+
+		// Insertion sort on smallest arrays
+		if ( length < SMALL ) {
+			for ( long i = from; i < to; i++ ) {
+				for ( long j = i; j > from && ( comp.compare( j - 1, j ) > 0 ); j-- ) {
+					swapper.swap( j, j - 1 );
+				}
+			}
+			return;
+		}
+
+		// Recursively sort halves
+		long mid = ( from + to ) >>> 1;
+		mergeSort( from, mid, comp, swapper );
+		mergeSort( mid, to, comp, swapper );
+
+		// If list is already sorted, nothing left to do. This is an
+		// optimization that results in faster sorts for nearly ordered lists.
+		if ( comp.compare( mid - 1, mid ) <= 0 ) return;
+
+		// Merge sorted halves
+		inPlaceMerge( from, mid, to, comp, swapper );
+	}
+
+	/** Sorts the specified range of elements using the specified big swapper and according to the order induced by the specified
+	 * comparator using quicksort. 
+	 * 
+	 * <p>The sorting algorithm is a tuned quicksort adapted from Jon L. Bentley and M. Douglas
+	 * McIlroy, “Engineering a Sort Function”, <i>Software: Practice and Experience</i>, 23(11), pages
+	 * 1249−1265, 1993.
+	 * 
+	 * @param from the index of the first element (inclusive) to be sorted.
+	 * @param to the index of the last element (exclusive) to be sorted.
+	 * @param comp the comparator to determine the order of the generic data.
+	 * @param swapper an object that knows how to swap the elements at any two positions.
+	 * 
+	 */
+	public static void quickSort( final long from, final long to, final LongComparator comp, final BigSwapper swapper ) {
+		final long len = to - from;
+		// Insertion sort on smallest arrays
+		if ( len < SMALL ) {
+			for ( long i = from; i < to; i++ )
+				for ( long j = i; j > from && ( comp.compare( j - 1, j ) > 0 ); j-- ) {
+					swapper.swap( j, j - 1 );
+				}
+			return;
+		}
+
+		// Choose a partition element, v
+		long m = from + len / 2; // Small arrays, middle element
+		if ( len > SMALL ) {
+			long l = from, n = to - 1;
+			if ( len > MEDIUM ) { // Big arrays, pseudomedian of 9
+				long s = len / 8;
+				l = med3( l, l + s, l + 2 * s, comp );
+				m = med3( m - s, m, m + s, comp );
+				n = med3( n - 2 * s, n - s, n, comp );
+			}
+			m = med3( l, m, n, comp ); // Mid-size, med of 3
+		}
+		// long v = x[m];
+
+		long a = from, b = a, c = to - 1, d = c;
+		// Establish Invariant: v* (<v)* (>v)* v*
+		while ( true ) {
+			int comparison;
+			while ( b <= c && ( ( comparison = comp.compare( b, m ) ) <= 0 ) ) {
+				if ( comparison == 0 ) {
+					if ( a == m ) m = b; // moving target; DELTA to JDK !!!
+					else if ( b == m ) m = a; // moving target; DELTA to JDK !!!
+					swapper.swap( a++, b );
+				}
+				b++;
+			}
+			while ( c >= b && ( ( comparison = comp.compare( c, m ) ) >= 0 ) ) {
+				if ( comparison == 0 ) {
+					if ( c == m ) m = d; // moving target; DELTA to JDK !!!
+					else if ( d == m ) m = c; // moving target; DELTA to JDK !!!
+					swapper.swap( c, d-- );
+				}
+				c--;
+			}
+			if ( b > c ) break;
+			if ( b == m ) m = d; // moving target; DELTA to JDK !!!
+			else if ( c == m ) m = c; // moving target; DELTA to JDK !!!
+			swapper.swap( b++, c-- );
+		}
+
+		// Swap partition elements back to middle
+		long s;
+		long n = from + len;
+		s = Math.min( a - from, b - a );
+		vecSwap( swapper, from, b - s, s );
+		s = Math.min( d - c, n - d - 1 );
+		vecSwap( swapper, b, n - s, s );
+
+		// Recursively sort non-partition-elements
+		if ( ( s = b - a ) > 1 ) quickSort( from, from + s, comp, swapper );
+		if ( ( s = d - c ) > 1 ) quickSort( n - s, n, comp, swapper );
+	}
+
+	/**
+	 * Performs a binary search on an already-sorted range: finds the last position where an element
+	 * can be inserted without violating the ordering. Sorting is by a user-supplied comparison
+	 * function.
+	 * 
+	 * @param from Beginning of the range.
+	 * @param mid One past the end of the range.
+	 * @param secondCut Element to be searched for.
+	 * @param comp Comparison function.
+	 * @return the largest index i such that, for every j in the range <code>[from, i)</code>,
+	 * <code>comp.compare(secondCut, j)</code> is non-negative.
+	 */
+	private static long upperBound( long from, final long mid, final long secondCut, final LongComparator comp ) {
+		long len = mid - from;
+		while ( len > 0 ) {
+			long half = len / 2;
+			long middle = from + half;
+			if ( comp.compare( secondCut, middle ) < 0 ) {
+				len = half;
+			}
+			else {
+				from = middle + 1;
+				len -= half + 1;
+			}
+		}
+		return from;
+	}
+
+	/**
+	 * Swaps x[a .. (a+n-1)] with x[b .. (b+n-1)].
+	 */
+	private static void vecSwap( final BigSwapper swapper, long from, long l, final long s ) {
+		for ( int i = 0; i < s; i++, from++, l++ ) swapper.swap( from, l );
+	}
+	
+	public static void main( final String arg[] ) {
+		int[][] a = IntBigArrays.newBigArray( 1L << Integer.parseInt( arg[ 0 ] ) );
+		long x, y, z, start;
+
+		for( int k = 10; k-- != 0; ) {
+
+			start = -System.currentTimeMillis();
+
+			x = 0;
+			for( long i = IntBigArrays.length( a ); i-- != 0; ) x ^= i ^ IntBigArrays.get( a, i );
+			if ( x == 0 ) System.err.println();
+
+			System.out.println( "Single loop: " + ( start + System.currentTimeMillis() ) + "ms" );
+
+			start = -System.currentTimeMillis();
+
+			y = 0;
+			for( int i = a.length; i-- != 0; ) {
+				final int[] t = a[ i ];
+				for( int d = t.length; d-- != 0; ) y ^= t[ d ] ^ index( i, d ); 
+			}
+			if ( y == 0 ) System.err.println();
+			if ( x != y ) throw new AssertionError();
+
+			System.out.println( "Double loop: " + ( start + System.currentTimeMillis() ) + "ms" );
+
+			start = -System.currentTimeMillis();
+
+			z = 0;
+			long j = IntBigArrays.length( a );
+			for( int i = a.length; i-- != 0; ) {
+				final int[] t = a[ i ];
+				for( int d = t.length; d-- != 0; ) z ^= t[ d ] ^ --j; 
+			}
+			if ( z == 0 ) System.err.println();
+			if ( x != z ) throw new AssertionError();
+
+			System.out.println( "Double loop (with additional index): " + ( start + System.currentTimeMillis() ) + "ms" );
+		}
+	}
+}
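
To make the segment/displacement arithmetic concrete, here is a small sketch (invented class name) that writes one element of an int big array through the type-specific accessors and reads it back by manual indexing:

    import static it.unimi.dsi.fastutil.BigArrays.*;

    import it.unimi.dsi.fastutil.ints.IntBigArrays;

    public class BigArraysSketch {
        public static void main( String[] args ) {
            // A big array needs more than 2^27 elements to span several segments;
            // a small one still exercises the same arithmetic.
            final int[][] a = IntBigArrays.newBigArray( 1000 );
            final long i = 999;

            IntBigArrays.set( a, i, 42 );                         // accessor-based write
            final int x = a[ segment( i ) ][ displacement( i ) ]; // manual read of the same element

            System.out.println( x );                              // 42
            System.out.println( IntBigArrays.get( a, i ) );       // 42
            System.out.println( index( segment( i ), displacement( i ) ) == i ); // true
        }
    }
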
diff --git a/src/it/unimi/dsi/fastutil/BigList.java b/src/it/unimi/dsi/fastutil/BigList.java
new file mode 100644
index 0000000..41b7dc8
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/BigList.java
@@ -0,0 +1,125 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2010-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+import java.util.Collection;
+import java.util.List;
+
+/** A list with big (i.e., 64-bit) indices.
+ *
+ * <P>Instances of this class implement the same semantics as that of {@link List}: however,
+ * setter methods use long indices, index queries return long values, and returned iterators are actually
+ * of type {@link BigListIterator}.
+ */
+
+public interface BigList<K> extends Collection<K>, Size64 {
+
+	/** Returns the element at the specified position.
+	 * 
+	 * @param index a position in the big list.
+	 * @return the element at the specified position.
+	 * @see List#get(int)
+	 */
+	public K get( long index );
+
+	/** Removes the element at the specified position.
+	 * 
+	 * @param index a position in the big list.
+	 * @return the element previously at the specified position.
+	 * @see List#remove(int)
+	 */
+	public K remove( long index );
+
+	/** Replaces the element at the specified position in this big list with the specified element (optional operation). 
+	 * 
+	 * @param index a position in the big list.
+	 * @param element the element to be stored at the specified position.
+	 * @return the element previously at the specified positions.
+	 * @see List#set(int,Object)
+	 */
+	public K set( long index, K element );
+
+	/** Inserts the specified element at the specified position in this big list (optional operation).
+	 * 
+	 * @param index a position in the big list.
+	 * @param element an element to be inserted.
+	 * @see List#add(int,Object)
+	 */
+	public void add( long index, K element );
+	
+	/** Sets the size of this big list.
+	 *
+	 * <P>If the specified size is smaller than the current size, the last elements are
+	 * discarded. Otherwise, the new elements are filled with 0/<code>null</code>/<code>false</code>.
+	 *
+	 * @param size the new size.
+	 */
+
+	void size( long size );
+
+	/** Inserts all of the elements in the specified collection into this big list at the specified position (optional operation).
+	 * 
+	 * @param index index at which to insert the first element from the specified collection.
+	 * @param c collection containing elements to be added to this big list.
+	 * @return <code>true</code> if this big list changed as a result of the call
+	 * @see List#addAll(int, Collection) 
+	 */	
+	public boolean addAll( long index, Collection<? extends K> c );
+
+	/** Returns the index of the first occurrence of the specified element in this big list, or -1 if this big list does not contain the element.
+	 * 
+	 * @param o the object to search for.
+	 * @return the index of the first occurrence of the specified element in this big list, or -1 if this big list does not contain the element.
+	 * @see List#indexOf(Object)
+	 */
+	public long indexOf( Object o );
+	
+	/** Returns the index of the last occurrence of the specified element in this big list, or -1 if this big list does not contain the element. 
+	 * 
+	 * @param o the object to search for.
+	 * @return the index of the last occurrence of the specified element in this big list, or -1 if this big list does not contain the element.
+ 	 * @see List#lastIndexOf(Object)
+	 */
+	public long lastIndexOf( Object o );
+	
+	/** Returns a big-list iterator over the elements in this big list.
+	 * 
+	 * @return a big-list iterator over the elements in this big list.
+	 * @see List#listIterator()
+	 */
+
+	public BigListIterator<K> listIterator();
+
+	/** Returns a big-list iterator of the elements in this big list, starting at the specified position in this big list.
+	 * 
+	 * @param index index of first element to be returned from the big-list iterator.
+	 * @return a big-list iterator of the elements in this big list, starting at the specified position in
+	 * this big list.
+	 * @see List#listIterator(int)
+	 */
+	public BigListIterator<K> listIterator( long index );
+
+	/** Returns a big sublist view of this big list.
+	 * 
+	 * @param from the starting element (inclusive).
+	 * @param to the ending element (exclusive).
+	 * @return a big sublist view of this big list.
+	 * @see List#subList(int, int)
+	 */
+	public BigList<K> subList( long from, long to );
+}
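
A quick usage sketch of the interface through one of its type-specific implementations, ObjectBigArrayBigList (a big list backed by a big array of objects); the sketch class name is invented:

    import it.unimi.dsi.fastutil.BigList;
    import it.unimi.dsi.fastutil.objects.ObjectBigArrayBigList;

    public class BigListSketch {
        public static void main( String[] args ) {
            final BigList<String> l = new ObjectBigArrayBigList<String>();
            l.add( "foo" );
            l.add( "bar" );
            l.add( 1, "baz" );                        // positional add takes a long index

            System.out.println( l.size64() );         // 3 (64-bit size, from Size64)
            System.out.println( l.get( 2L ) );        // bar
            System.out.println( l.indexOf( "baz" ) ); // 1, returned as a long
        }
    }
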
diff --git a/src/it/unimi/dsi/fastutil/BigListIterator.java b/src/it/unimi/dsi/fastutil/BigListIterator.java
new file mode 100644
index 0000000..23be470
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/BigListIterator.java
@@ -0,0 +1,62 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2010-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+import java.util.Iterator;
+import java.util.ListIterator;
+
+/** A list iterator over a {@link BigList}.
+ *
+ * <P>This kind of iterator is essentially a {@link ListIterator} with long indices.
+ *
+ * @see Iterator
+ * @see ListIterator
+ */
+
+public interface BigListIterator<K> extends BidirectionalIterator<K> {
+	/** Returns the index of the element that would be returned by a subsequent call to next.
+	 * (Returns list size if the list iterator is at the end of the list.)
+	 * 
+	 * @return the index of the element that would be returned by a subsequent call to next, or list
+	 * size if list iterator is at end of list.
+	 * @see ListIterator#nextIndex()
+	 */
+	long nextIndex();
+
+	/** Returns the index of the element that would be returned by a subsequent call to previous.
+	 * (Returns -1 if the list iterator is at the beginning of the list.)
+	 * 
+	 * @return the index of the element that would be returned by a subsequent call to previous, or
+	 * -1 if list iterator is at beginning of list.
+	 * @see ListIterator#previousIndex()
+	 */
+
+	long previousIndex();
+
+	/** Skips the given number of elements.
+	 *
+	 * <P>The effect of this call is exactly the same as that of
+	 * calling {@link #next()} for <code>n</code> times (possibly stopping
+	 * if {@link #hasNext()} becomes false).
+	 *
+	 * @param n the number of elements to skip.
+	 * @return the number of elements actually skipped.
+	 */
+
+	long skip( long n );
+}
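
And a matching sketch of the long-based iterator contract, again using ObjectBigArrayBigList purely for illustration:

    import it.unimi.dsi.fastutil.BigList;
    import it.unimi.dsi.fastutil.BigListIterator;
    import it.unimi.dsi.fastutil.objects.ObjectBigArrayBigList;

    public class BigListIteratorSketch {
        public static void main( String[] args ) {
            final BigList<String> l = new ObjectBigArrayBigList<String>();
            for ( char c = 'a'; c <= 'e'; c++ ) l.add( String.valueOf( c ) );

            final BigListIterator<String> i = l.listIterator();
            System.out.println( i.skip( 3 ) );        // 3: three elements actually skipped
            System.out.println( i.nextIndex() );      // 3 (a long)
            System.out.println( i.next() );           // d
            System.out.println( i.previousIndex() );  // 3 again, since next() moved the cursor
        }
    }
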
diff --git a/src/it/unimi/dsi/fastutil/BigSwapper.java b/src/it/unimi/dsi/fastutil/BigSwapper.java
new file mode 100644
index 0000000..2958c55
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/BigSwapper.java
@@ -0,0 +1,32 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2010-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+/** An object that can swap elements whose positions are specified by longs. 
+ * 
+ * @see BigArrays#quickSort(long, long, it.unimi.dsi.fastutil.longs.LongComparator, BigSwapper) 
+ */
+
+public interface BigSwapper {
+	/** Swaps the data at the given positions.
+	 * 
+	 * @param a the first position to swap.
+	 * @param b the second position to swap.
+	 */
+	void swap( long a, long b );
+}
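
A BigSwapper is typically paired with a LongComparator in BigArrays.quickSort() or mergeSort(); the sketch below (invented class name) sorts an int big array in place through its type-specific accessors:

    import it.unimi.dsi.fastutil.BigArrays;
    import it.unimi.dsi.fastutil.BigSwapper;
    import it.unimi.dsi.fastutil.ints.IntBigArrays;
    import it.unimi.dsi.fastutil.longs.LongComparator;

    public class BigSwapperSketch {
        public static void main( String[] args ) {
            final int[][] a = IntBigArrays.wrap( new int[] { 3, 1, 2 } );

            BigArrays.quickSort( 0, IntBigArrays.length( a ),
                new LongComparator() {
                    // Arguments are positions into the big array.
                    public int compare( long p, long q ) {
                        final int x = IntBigArrays.get( a, p ), y = IntBigArrays.get( a, q );
                        return x < y ? -1 : x == y ? 0 : 1;
                    }
                    // LongComparator also extends Comparator<Long>, hence the boxed variant.
                    public int compare( Long p, Long q ) { return compare( p.longValue(), q.longValue() ); }
                },
                new BigSwapper() {
                    public void swap( long p, long q ) {
                        final int t = IntBigArrays.get( a, p );
                        IntBigArrays.set( a, p, IntBigArrays.get( a, q ) );
                        IntBigArrays.set( a, q, t );
                    }
                } );

            System.out.println( IntBigArrays.get( a, 0 ) ); // 1
        }
    }
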
diff --git a/src/it/unimi/dsi/fastutil/Function.java b/src/it/unimi/dsi/fastutil/Function.java
new file mode 100644
index 0000000..bae3a38
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/Function.java
@@ -0,0 +1,101 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+/** A function mapping keys into values.
+ * 
+ * <p>Instances of this class represent functions: the main difference with {@link java.util.Map}
+ * is that functions do not in principle allow enumeration of their domain or range. The need for
+ * this interface lies in the existence of several highly optimized implementations of 
+ * functions (e.g., minimal perfect hashes) which do not actually store their domain or range explicitly.
+ * In case the domain is known, {@link #containsKey(Object)} can be used to perform membership queries. 
+ *
+ * <p>The choice of naming all methods exactly as in {@link java.util.Map} makes it possible
+ * for all type-specific maps to extend type-specific functions (e.g., {@link it.unimi.dsi.fastutil.ints.Int2IntMap}
+ * extends {@link it.unimi.dsi.fastutil.ints.Int2IntFunction}). However, {@link #size()} is allowed to return -1 to denote
+ * that the number of keys is not available (e.g., in the case of a string hash function).
+ * 
+ * <p>Note that there is an {@link it.unimi.dsi.fastutil.objects.Object2ObjectFunction} that
+ * can also set its default return value.
+ * 
+ * <p><strong>Warning</strong>: Equality of functions is <em>not specified</em>
+ * by contract, and it will usually be <em>by reference</em>, as there is no way to enumerate the keys
+ * and establish whether two functions represent the same mathematical entity.
+ *
+ * @see java.util.Map
+ */
+
+public interface Function<K,V> {
+
+	/** Associates the specified value with the specified key in this function (optional operation).
+	 *
+	 * @param key the key.
+	 * @param value the value.
+	 * @return the old value, or <code>null</code> if no value was present for the given key.
+	 * @see java.util.Map#put(Object,Object)
+	 */
+
+	V put( K key, V value );
+
+	/** Returns the value associated by this function to the specified key. 
+	 *
+	 * @param key the key.
+	 * @return the corresponding value, or <code>null</code> if no value was present for the given key.
+	 * @see java.util.Map#get(Object)
+	 */
+
+	V get( Object key );
+
+	/** Returns true if this function contains a mapping for the specified key. 
+	 *
+	 * <p>Note that for some kind of functions (e.g., hashes) this method
+	 * will always return true.
+	 *
+	 * @param key the key.
+	 * @return true if this function associates a value to <code>key</code>.
+	 * @see java.util.Map#containsKey(Object)
+	 */
+
+	 boolean containsKey( Object key );
+
+	/** Removes this key and the associated value from this function if it is present (optional operation).
+	 *  
+	 * @param key the key.
+	 * @return the old value, or <code>null</code> if no value was present for the given key.
+	 * @see java.util.Map#remove(Object)
+	 */
+	
+	V remove( Object key );
+
+	/** Returns the intended number of keys in this function, or -1 if no such number exists.
+	 * 
+	 * <p>Most function implementations will have some knowledge of the intended number of keys
+	 * in their domain. In some cases, however, this might not be possible.
+	 * 
+	 *  @return the intended number of keys in this function, or -1 if that number is not available.
+	 */
+    int size();
+
+	/** Removes all associations from this function (optional operation).
+	 *  
+	 * @see java.util.Map#clear()
+	 */
+	
+	void clear();
+
+}
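
The point about functions that compute values on the fly can be seen in a tiny sketch (invented class name): there is no stored domain, so containsKey() always answers true and size() returns -1, while the optional mutation methods stay unimplemented.

    import it.unimi.dsi.fastutil.Function;

    /** A function computed on the fly: it stores no domain, so containsKey()
     *  always answers true and size() returns -1, as the contract allows. */
    public class StringLengthFunction implements Function<String, Integer> {
        public Integer get( Object key ) { return key == null ? null : Integer.valueOf( ( (String)key ).length() ); }
        public boolean containsKey( Object key ) { return true; }
        public int size() { return -1; }
        // The optional mutation operations are unsupported.
        public Integer put( String key, Integer value ) { throw new UnsupportedOperationException(); }
        public Integer remove( Object key ) { throw new UnsupportedOperationException(); }
        public void clear() { throw new UnsupportedOperationException(); }
    }
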
diff --git a/src/it/unimi/dsi/fastutil/Hash.java b/src/it/unimi/dsi/fastutil/Hash.java
new file mode 100644
index 0000000..7bc912b
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/Hash.java
@@ -0,0 +1,173 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+/** Basic data for all hash-based classes.
+ *
+ * <h2>Historical note</h2>
+ * 
+ * <p><strong>Warning:</strong> the following comments are here for historical reasons,
+ * and apply just to the <em>double hash</em> classes that can be optionally generated.
+ * The standard <code>fastutil</code> distribution since 6.1.0 uses linear-probing hash
+ * tables, and tables are always sized as powers of two. 
+ *
+ * <p>The classes in <code>fastutil</code> are built around open-addressing hashing
+ * implemented <em>via</em> double hashing. Following Knuth's suggestions in the third volume of <em>The Art of Computer
+ * Programming</em>, we use for the table size a prime <var>p</var> such that
+ * <var>p</var>-2 is also prime. In this way hashing is implemented with modulo <var>p</var>,
+ * and secondary hashing with modulo <var>p</var>-2.
+ *
+ * <p>Entries in a table can be in three states: {@link #FREE}, {@link #OCCUPIED} or {@link #REMOVED}.
+ * The naive handling of removed entries requires that you search for a free entry as if they were occupied. However,
+ * <code>fastutil</code> implements two useful optimizations, based on the following invariant:
+ * <blockquote>
+ * Let <var>i</var><sub>0</sub>, <var>i</var><sub>1</sub>, &hellip;, <var>i</var><sub><var>p</var>-1</sub> be
+ * the permutation of the table indices induced by the key <var>k</var>, that is, <var>i</var><sub>0</sub> is the hash
+ * of <var>k</var> and the following indices are obtained by adding (modulo <var>p</var>) the secondary hash plus one.
+ * If there is an {@link #OCCUPIED} entry with key <var>k</var>, its index in the sequence above comes <em>before</em>
+ * the indices of any {@link #REMOVED} entries with key <var>k</var>.
+ * </blockquote>
+ * 
+ * <p>When we search for the key <var>k</var> we scan the entries in the
+ * sequence <var>i</var><sub>0</sub>, <var>i</var><sub>1</sub>, &hellip;,
+ * <var>i</var><sub><var>p</var>-1</sub> and stop when <var>k</var> is found,
+ * when we finish the sequence or when we find a {@link #FREE} entry. Note
+ * that the correctness of this procedure is not completely trivial. Indeed,
+ * when we stop at a {@link #REMOVED} entry with key <var>k</var> we must rely
+ * on the invariant to be sure that no {@link #OCCUPIED} entry with the same
+ * key can appear later. If we frequently insert and remove the same entries,
+ * this optimization can be very effective (note, however, that when using
+ * objects as keys or values deleted entries are set to a special fixed value to
+ * optimize garbage collection).
+ *
+ * <p>Moreover, during the probe we keep the index of the first {@link #REMOVED} entry we meet. 
+ * If we actually have to insert a new element, we use that 
+ * entry if we can, thus avoiding polluting another {@link #FREE} entry. Since this position comes
+ * <i>a fortiori</i> before any {@link #REMOVED} entries with the same key, we are also keeping the invariant true.
+ */
+
+public interface Hash {
+
+	/** The initial default size of a hash table. */
+	final public int DEFAULT_INITIAL_SIZE = 16;
+	/** The default load factor of a hash table. */
+	final public float DEFAULT_LOAD_FACTOR = .75f;
+	/** The load factor for a (usually small) table that is meant to be particularly fast. */
+	final public float FAST_LOAD_FACTOR = .5f;
+	/** The load factor for a (usually very small) table that is meant to be extremely fast. */
+	final public float VERY_FAST_LOAD_FACTOR = .25f;
+
+	/** A generic hash strategy.
+	 *
+	 * <P>Custom hash structures (e.g., {@link
+	 * it.unimi.dsi.fastutil.objects.ObjectOpenCustomHashSet}) make it possible to hash objects
+	 * using arbitrary functions, a typical example being that of {@linkplain
+	 * it.unimi.dsi.fastutil.ints.IntArrays#HASH_STRATEGY arrays}. Of course,
+	 * one has to compare objects for equality consistently with the chosen
+	 * function. A <em>hash strategy</em>, thus, specifies an {@linkplain
+	 * #equals(Object,Object) equality method} and a {@linkplain
+	 * #hashCode(Object) hash function}, with the obvious property that
+	 * equal objects must have the same hash code.
+	 *
+	 * <P>If your custom collection must be able to contain <code>null</code>,
+	 * then your strategy must be able to handle <code>null</code>, too.
+	 */
+
+	public interface Strategy<K> {
+
+		/** Returns the hash code of the specified object with respect to this hash strategy.
+		 *
+		 * @param o an object (or <code>null</code>).
+		 * @return the hash code of the given object with respect to this hash strategy.
+		 */
+
+		public int hashCode( K o );
+
+		/** Returns true if the given objects are equal with respect to this hash strategy.
+		 *
+		 * @param a an object (or <code>null</code>).
+		 * @param b another object (or <code>null</code>).
+		 * @return true if the two specified objects are equal with respect to this hash strategy.
+		 */
+		public boolean equals( K a, K b );
+	}
+
+	/** The default growth factor of a hash table. */
+	final public int DEFAULT_GROWTH_FACTOR = 16;
+	/** The state of a free hash table entry. */
+	final public byte FREE = 0;
+	/** The state of an occupied hash table entry. */
+	final public byte OCCUPIED = -1;
+	/** The state of a hash table entry freed by a deletion. */
+	final public byte REMOVED = 1;
+	 
+	/** A list of primes to be used as table sizes. The <var>i</var>-th element is 
+	 *  the largest prime <var>p</var> smaller than 2<sup>(<var>i</var>+28)/16</sup> 
+	 * and such that <var>p</var>-2 is also prime (or 1, for the first few entries). */
+
+	final public int PRIMES[] = { 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 5, 5, 5, 5, 5, 7, 7, 7,
+								  7, 7, 7, 7, 7, 7, 7, 7, 13, 13, 13, 13, 13, 13, 13, 13, 19, 19, 19, 19, 19,
+								  19, 19, 19, 19, 19, 19, 19, 31, 31, 31, 31, 31, 31, 31, 43, 43, 43, 43, 43,
+								  43, 43, 43, 61, 61, 61, 61, 61, 73, 73, 73, 73, 73, 73, 73, 103, 103, 109,
+								  109, 109, 109, 109, 139, 139, 151, 151, 151, 151, 181, 181, 193, 199, 199,
+								  199, 229, 241, 241, 241, 271, 283, 283, 313, 313, 313, 349, 349, 349, 349,
+								  421, 433, 463, 463, 463, 523, 523, 571, 601, 619, 661, 661, 661, 661, 661,
+								  823, 859, 883, 883, 883, 1021, 1063, 1093, 1153, 1153, 1231, 1321, 1321,
+								  1429, 1489, 1489, 1621, 1699, 1789, 1873, 1951, 2029, 2131, 2143, 2311,
+								  2383, 2383, 2593, 2731, 2803, 3001, 3121, 3259, 3391, 3583, 3673, 3919,
+								  4093, 4273, 4423, 4651, 4801, 5023, 5281, 5521, 5743, 5881, 6301, 6571,
+								  6871, 7129, 7489, 7759, 8089, 8539, 8863, 9283, 9721, 10141, 10531, 11071,
+								  11551, 12073, 12613, 13009, 13759, 14323, 14869, 15649, 16363, 17029,
+								  17839, 18541, 19471, 20233, 21193, 22159, 23059, 24181, 25171, 26263,
+								  27541, 28753, 30013, 31321, 32719, 34213, 35731, 37309, 38923, 40639,
+								  42463, 44281, 46309, 48313, 50461, 52711, 55051, 57529, 60091, 62299,
+								  65521, 68281, 71413, 74611, 77713, 81373, 84979, 88663, 92671, 96739,
+								  100801, 105529, 109849, 115021, 120079, 125509, 131011, 136861, 142873,
+								  149251, 155863, 162751, 169891, 177433, 185071, 193381, 202129, 211063,
+								  220021, 229981, 240349, 250969, 262111, 273643, 285841, 298411, 311713,
+								  325543, 339841, 355009, 370663, 386989, 404269, 422113, 440809, 460081,
+								  480463, 501829, 524221, 547399, 571603, 596929, 623353, 651019, 679909,
+								  709741, 741343, 774133, 808441, 844201, 881539, 920743, 961531, 1004119,
+								  1048573, 1094923, 1143283, 1193911, 1246963, 1302181, 1359733, 1420039,
+								  1482853, 1548541, 1616899, 1688413, 1763431, 1841293, 1922773, 2008081,
+								  2097133, 2189989, 2286883, 2388163, 2493853, 2604013, 2719669, 2840041,
+								  2965603, 3097123, 3234241, 3377191, 3526933, 3682363, 3845983, 4016041,
+								  4193803, 4379719, 4573873, 4776223, 4987891, 5208523, 5439223, 5680153,
+								  5931313, 6194191, 6468463, 6754879, 7053331, 7366069, 7692343, 8032639,
+								  8388451, 8759953, 9147661, 9552733, 9975193, 10417291, 10878619, 11360203,
+								  11863153, 12387841, 12936529, 13509343, 14107801, 14732413, 15384673,
+								  16065559, 16777141, 17519893, 18295633, 19105483, 19951231, 20834689,
+								  21757291, 22720591, 23726449, 24776953, 25873963, 27018853, 28215619,
+								  29464579, 30769093, 32131711, 33554011, 35039911, 36591211, 38211163,
+								  39903121, 41669479, 43514521, 45441199, 47452879, 49553941, 51747991,
+								  54039079, 56431513, 58930021, 61539091, 64263571, 67108669, 70079959,
+								  73182409, 76422793, 79806229, 83339383, 87029053, 90881083, 94906249,
+								  99108043, 103495879, 108077731, 112863013, 117860053, 123078019, 128526943,
+								  134217439, 140159911, 146365159, 152845393, 159612601, 166679173,
+								  174058849, 181765093, 189812341, 198216103, 206991601, 216156043,
+								  225726379, 235720159, 246156271, 257054491, 268435009, 280319203,
+								  292730833, 305691181, 319225021, 333358513, 348117151, 363529759,
+								  379624279, 396432481, 413983771, 432312511, 451452613, 471440161,
+								  492312523, 514109251, 536870839, 560640001, 585461743, 611382451,
+								  638450569, 666717199, 696235363, 727060069, 759249643, 792864871,
+								  827967631, 864625033, 902905501, 942880663, 984625531, 1028218189,
+								  1073741719, 1121280091, 1170923713, 1222764841, 1276901371, 1333434301,
+								  1392470281, 1454120779, 1518500173, 1585729993, 1655935399, 1729249999,
+								  1805811253, 1885761133, 1969251079, 2056437379, 2147482951 };
+
+}
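
A small sketch of a custom Strategy, hashing int arrays by content and plugging it into ObjectOpenCustomHashSet (whose strategy-taking constructor is assumed here); the predefined IntArrays.HASH_STRATEGY mentioned above serves the same purpose:

    import it.unimi.dsi.fastutil.Hash;
    import it.unimi.dsi.fastutil.objects.ObjectOpenCustomHashSet;

    public class ArrayStrategySketch {
        // A strategy hashing int arrays by content rather than by reference.
        public static final Hash.Strategy<int[]> BY_CONTENT = new Hash.Strategy<int[]>() {
            public int hashCode( int[] o ) { return o == null ? 0 : java.util.Arrays.hashCode( o ); }
            public boolean equals( int[] a, int[] b ) { return java.util.Arrays.equals( a, b ); }
        };

        public static void main( String[] args ) {
            final ObjectOpenCustomHashSet<int[]> s = new ObjectOpenCustomHashSet<int[]>( BY_CONTENT );
            s.add( new int[] { 1, 2, 3 } );
            System.out.println( s.contains( new int[] { 1, 2, 3 } ) ); // true: equality by content
        }
    }
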
diff --git a/src/it/unimi/dsi/fastutil/HashCommon.java b/src/it/unimi/dsi/fastutil/HashCommon.java
new file mode 100644
index 0000000..a0af794
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/HashCommon.java
@@ -0,0 +1,173 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+/** Common code for all hash-based classes. */
+
+public class HashCommon {
+
+	protected HashCommon() {};
+
+	/** This reference is used to fill keys and values of removed entries (if
+		they are objects). <code>null</code> cannot be used as it would confuse the
+		search algorithm in the presence of an actual <code>null</code> key. */ 
+	public static final Object REMOVED = new Object();
+
+	/** Avalanches the bits of an integer by applying the finalisation step of MurmurHash3.
+	 * 
+	 * <p>This function implements the finalisation step of Austin Appleby's <a href="http://sites.google.com/site/murmurhash/">MurmurHash3</a>.
+	 * Its purpose is to avalanche the bits of the argument to within 0.25% bias. It is used, among other things, to scramble quickly (but deeply) the hash
+	 * values returned by {@link Object#hashCode()}.
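+	 * 
+	 * <p>For instance (an illustrative sketch; <code>table</code> and <code>key</code> are hypothetical),
+	 * a hash table with a power-of-two capacity might spread user-supplied hash codes before masking them:
+	 * <pre>
+	 * // assumes table.length is a power of two
+	 * final int mask = table.length - 1;
+	 * final int bucket = HashCommon.murmurHash3( key.hashCode() ) &amp; mask;
+	 * </pre>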
+	 * 
+	 * @param x an integer.
+	 * @return a hash value with good avalanching properties.
+	 */	
+	public final static int murmurHash3( int x ) {
+		x ^= x >>> 16;
+		x *= 0x85ebca6b;
+		x ^= x >>> 13;
+		x *= 0xc2b2ae35;
+		x ^= x >>> 16;
+		return x;
+	}
+
+	/** Avalanches the bits of a long integer by applying the finalisation step of MurmurHash3.
+	 * 
+	 * <p>This function implements the finalisation step of Austin Appleby's <a href="http://sites.google.com/site/murmurhash/">MurmurHash3</a>.
+	 * Its purpose is to avalanche the bits of the argument to within 0.25% bias. It is used, among other things, to scramble quickly (but deeply) the hash
+	 * values returned by {@link Object#hashCode()}.
+	 * 
+	 * @param x a long integer.
+	 * @return a hash value with good avalanching properties.
+	 */	
+	public final static long murmurHash3( long x ) {
+		x ^= x >>> 33;
+		x *= 0xff51afd7ed558ccdL;
+		x ^= x >>> 33;
+		x *= 0xc4ceb9fe1a85ec53L;
+		x ^= x >>> 33;
+
+		return x;
+	}
+
+	/** Returns the hash code that would be returned by {@link Float#hashCode()}.
+	 *
+	 * @return the same code as {@link Float#hashCode() new Float(f).hashCode()}.
+	 */
+
+	final public static int float2int( final float f ) {
+		return Float.floatToRawIntBits( f );
+	}
+
+	/** Returns the hash code that would be returned by {@link Double#hashCode()}.
+	 *
+	 * @return the same code as {@link Double#hashCode() new Double(d).hashCode()}.
+	 */
+
+	final public static int double2int( final double d ) {
+		final long l = Double.doubleToRawLongBits( d );
+		return (int)( l ^ ( l >>> 32 ) );
+	}
+
+	/** Returns the hash code that would be returned by {@link Long#hashCode()}.
+	 * 
+	 * @return the same code as {@link Long#hashCode() new Long(l).hashCode()}.
+	 */
+	final public static int long2int( final long l ) {
+		return (int)( l ^ ( l >>> 32 ) );
+	}
+	
+	/** Returns the least power of two greater than or equal to the specified value.
+	 * 
+	 * <p>Note that this function will return 1 when the argument is 0.
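+	 * 
+	 * <p>For example, <code>nextPowerOfTwo( 1000 )</code> returns 1024, while
+	 * <code>nextPowerOfTwo( 1024 )</code> returns 1024 itself.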
+	 * 
+	 * @param x an integer smaller than or equal to 2<sup>30</sup>.
+	 * @return the least power of two greater than or equal to the specified value.
+	 */
+	public static int nextPowerOfTwo( int x ) {
+		if ( x == 0 ) return 1;
+		x--;
+		x |= x >> 1;
+		x |= x >> 2;
+		x |= x >> 4;
+		x |= x >> 8;
+		return ( x | x >> 16 ) + 1;
+	}
+
	/** Returns the least power of two greater than or equal to the specified value.
+	 * 
+	 * <p>Note that this function will return 1 when the argument is 0.
+	 * 
+	 * @param x a long integer smaller than or equal to 2<sup>62</sup>.
+	 * @return the least power of two greater than or equal to the specified value.
+	 */
+	public static long nextPowerOfTwo( long x ) {
+		if ( x == 0 ) return 1;
+		x--;
+		x |= x >> 1;
+		x |= x >> 2;
+		x |= x >> 4;
+		x |= x >> 8;
+		x |= x >> 16;
+		return ( x | x >> 32 ) + 1;
+	}
+
+
+	/** Returns the maximum number of entries that can be filled before rehashing. 
+	 *
+	 * @param n the size of the backing array.
+	 * @param f the load factor.
+	 * @return the maximum number of entries before rehashing. 
+	 */
+	public static int maxFill( final int n, final float f ) {
+		return (int)Math.ceil( n * f );
+	}
+
+	/** Returns the maximum number of entries that can be filled before rehashing. 
+	 * 
+	 * @param n the size of the backing array.
+	 * @param f the load factor.
+	 * @return the maximum number of entries before rehashing. 
+	 */
+	public static long maxFill( final long n, final float f ) {
+		return (long)Math.ceil( n * f );
+	}
+
+	/** Returns the least power of two smaller than or equal to 2<sup>30</sup> and larger than or equal to <code>Math.ceil( expected / f )</code>. 
+	 * 
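+	 * <p>For example, <code>arraySize( 1000, 0.75f )</code> computes <code>Math.ceil( 1000 / 0.75 ) == 1334</code>
+	 * and rounds it up to the next power of two, obtaining 2048; the corresponding
+	 * {@link #maxFill(int, float)} is then <code>(int)Math.ceil( 2048 * 0.75f ) == 1536</code>.
+	 * 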
+	 * @param expected the expected number of elements in a hash table.
+	 * @param f the load factor.
+	 * @return the minimum possible size for a backing array.
+	 * @throws IllegalArgumentException if the necessary size is larger than 2<sup>30</sup>.
+	 */
+	public static int arraySize( final int expected, final float f ) {
+		final long s = nextPowerOfTwo( (long)Math.ceil( expected / f ) );
+		if ( s > (1 << 30) ) throw new IllegalArgumentException( "Too large (" + expected + " expected elements with load factor " + f + ")" );
+		return (int)s;
+	}
+
+	/** Returns the least power of two larger than or equal to <code>Math.ceil( expected / f )</code>. 
+	 * 
+	 * @param expected the expected number of elements in a hash table.
+	 * @param f the load factor.
+	 * @return the minimum possible size for a backing big array.
+	 */
+	public static long bigArraySize( final long expected, final float f ) {
+		return nextPowerOfTwo( (long)Math.ceil( expected / f ) );
+	}
+}
\ No newline at end of file
diff --git a/src/it/unimi/dsi/fastutil/IndirectPriorityQueue.java b/src/it/unimi/dsi/fastutil/IndirectPriorityQueue.java
new file mode 100644
index 0000000..3162d9a
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/IndirectPriorityQueue.java
@@ -0,0 +1,161 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+import java.util.Comparator;
+import java.util.NoSuchElementException;
+
+/** An indirect priority queue.
+ *
+ * <P>An indirect priority queue provides a way to {@linkplain #enqueue(int)
+ * enqueue} by index elements taken from a given <em>reference list</em>,
+ * and to {@linkplain #dequeue() dequeue} them in some specified order.
+ * Elements that are <em>smaller</em> in the specified order are
+ * dequeued first. It
+ * is also possible to get the {@linkplain #first() index of the first element}, that
+ * is, the index that would be dequeued next.
+ *
+ * <P>Additionally, the queue may provide a method to peek at the index of the
+ * element that would be dequeued {@linkplain #last() last}.
+ *
+ * <P>The reference list should not change during queue operations (or, more
+ * precisely, the relative order of the elements corresponding to indices in the queue should not
+ * change). Nonetheless, some implementations may give the caller a way to
+ * notify the queue that the {@linkplain #changed() first element has changed its 
+ * relative position in the order}.
+ *
+ * <P>Optionally, an indirect priority queue may even provide methods to notify
+ * {@linkplain #changed(int) the change of <em>any</em> element of the
+ * reference list}, to check {@linkplain #contains(int) the presence of
+ * an index in the queue}, and to {@linkplain #remove(int) remove an index from the queue}. 
+ * It may even allow notifying that {@linkplain #allChanged() all elements have changed}.
+ *
+ * <P>It is always possible to enqueue two distinct indices corresponding to
+ * equal elements of the reference list. However, depending on the
+ * implementation, it may or may not be possible to enqueue the same
+ * index twice.
+ *
+ * <P>Note that <em>all element manipulation happens via indices</em>.
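+ *
+ * <P>As an illustrative sketch (the concrete implementation is left unspecified here; type-specific
+ * implementations are provided elsewhere in <code>fastutil</code>), given a reference array one
+ * enqueues and dequeues <em>indices</em>:
+ * <pre>
+ * double[] refArray = { 5.0, 1.0, 3.0 };
+ * // q is some IndirectPriorityQueue implementation built on refArray,
+ * // using the natural ordering of the reference elements
+ * q.enqueue( 0 );
+ * q.enqueue( 1 );
+ * q.enqueue( 2 );
+ * int first = q.dequeue(); // 1, the index of the smallest element (1.0)
+ * </pre>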
+ */
+
+public interface IndirectPriorityQueue<K> {
+
+	/** Enqueues a new element.
+	 *
+	 * @param index the index (in the reference list) of the element to enqueue.
+	 */
+
+	void enqueue( int index );
+
+	/** Dequeues the {@linkplain #first() first} element from this queue.
+	 *
+	 * @return the dequeued element.
+	 * @throws NoSuchElementException if this queue is empty.
+	 */
+
+	int dequeue();
+
+	/** Checks whether this queue is empty.
+	 *
+	 * @return true if this queue is empty.
+	 */
+
+	boolean isEmpty();
+
+	/** Returns the number of elements in this queue.
+	 *
+	 * @return the number of elements in this queue.
+	 */
+
+	int size();
+
+	/** Removes all elements from this queue.
+	 */
+
+	void clear();
+
+	/** Returns the first element of this queue.
+	 *
+	 * @return the first element.
+	 * @throws NoSuchElementException if this queue is empty.
+	 */
+
+	int first();
+
+	/** Returns the last element of this queue, that is, the element that would be dequeued last (optional operation).
+	 *
+	 * @return the last element.
+	 * @throws NoSuchElementException if this queue is empty.
+	 */
+
+	int last();
+
+	/** Notifies this queue that the {@linkplain #first() first element} has changed (optional operation).
+	 *
+	 */
+
+	void changed();
+
+	/** Returns the comparator associated with this queue, or <code>null</code> if it uses its elements' natural ordering.
+	 *
+	 * @return the comparator associated with this queue, or <code>null</code> if it uses its elements' natural ordering.
+	 */
+	Comparator <? super K> comparator();
+
+	/** Notifies this queue that the specified element has changed (optional operation).
+	 *
+	 * <P>Note that the specified element must belong to this queue.
+	 *
+	 * @param index the element that has changed.
+	 * @throws NoSuchElementException if the specified element is not in this queue.
+	 */
+
+	public void changed( int index );
+
+	/** Notifies this queue that all elements have changed (optional operation).
+	 */
+
+	public void allChanged();
+
+	/** Checks whether a given index belongs to this queue (optional operation).
+	 * 
+	 * @return true if the specified index belongs to this queue.
+	 */
+	public boolean contains( int index );
+	
+	/** Removes the specified element from this queue (optional operation).
+	 *
+	 * @param index the element to be removed.
+	 * @return true if the index was in the queue.
+	 */
+
+	public boolean remove( int index );
+
+    /** Retrieves the front of this queue in a given array (optional operation).
+     *
+     * <p>The <em>front</em> of an indirect queue is the set of indices whose associated elements in the reference array 
+     * are equal to the element associated with the {@linkplain #first() first index}. These indices can always be obtained by dequeueing, but 
+     * this method should retrieve them efficiently into the given array without modifying the state of this queue.
+     * For instance, if the reference array is <code>{ 3, 1, 1, 2 }</code> and all four indices are enqueued, the front is <code>{ 1, 2 }</code>.
+     * 
+     * @param a an array large enough to hold the front (e.g., at least as long as the reference array).
+     * @return the number of elements actually written (starting from the first position of <code>a</code>).
+     */
+
+	public int front( final int[] a );
+
+}
diff --git a/src/it/unimi/dsi/fastutil/IndirectPriorityQueues.java b/src/it/unimi/dsi/fastutil/IndirectPriorityQueues.java
new file mode 100644
index 0000000..0995031
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/IndirectPriorityQueues.java
@@ -0,0 +1,118 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2003-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+import java.util.Comparator;
+import java.util.NoSuchElementException;
+
+/** A class providing static methods and objects that do useful things with indirect priority queues.
+ *
+ * @see IndirectPriorityQueue
+ */
+
+public class IndirectPriorityQueues {
+
+	private IndirectPriorityQueues() {}
+
+	/** An immutable class representing the empty indirect priority queue.
+	 *
+	 * <P>This class may be useful to implement your own in case you subclass
+	 * {@link IndirectPriorityQueue}.
+	 */
+
+	@SuppressWarnings("rawtypes")
+	public static class EmptyIndirectPriorityQueue extends AbstractIndirectPriorityQueue {
+
+		protected EmptyIndirectPriorityQueue() {}
+
+		public void enqueue( final int i ) { throw new UnsupportedOperationException(); }
+		public int dequeue() { throw new NoSuchElementException(); }
+		public boolean isEmpty() { return true; }
+		public int size() { return 0; }
+		public boolean contains( int index ) { return false; }
+		public void clear() {}
+		public int first() { throw new NoSuchElementException(); }
+		public int last() { throw new NoSuchElementException(); }
+		public void changed() { throw new NoSuchElementException(); }
+		public void allChanged() {}
+		public Comparator<?> comparator() { return null; }
+		public void changed( final int i ) { throw new IllegalArgumentException( "Index " + i + " is not in the queue" ); }
+		public boolean remove( final int i ) { return false; }
+		public int front( int[] a ) { return 0; }
+		
+	}
+
+	/** An empty indirect priority queue (immutable).
+	 */
+
+	public final static EmptyIndirectPriorityQueue EMPTY_QUEUE = new EmptyIndirectPriorityQueue();
+
+
+	/** A synchronized wrapper class for indirect priority queues. */
+
+	public static class SynchronizedIndirectPriorityQueue<K> implements IndirectPriorityQueue<K> {
+		
+		public static final long serialVersionUID = -7046029254386353129L;
+
+		final protected IndirectPriorityQueue<K> q;
+		final protected Object sync;
+
+		protected SynchronizedIndirectPriorityQueue( final IndirectPriorityQueue<K> q, final Object sync ) {
+			this.q = q;
+			this.sync = sync;
+		}
+
+		protected SynchronizedIndirectPriorityQueue( final IndirectPriorityQueue<K> q ) {
+			this.q = q;
+			this.sync = this;
+		}
+
+		public void enqueue( int x ) { synchronized( sync ) { q.enqueue( x ); } }
+		public int dequeue() { synchronized( sync ) { return q.dequeue(); } }
+		public boolean contains( final int index ) { synchronized( sync ) { return q.contains( index ); } }
+		public int first() { synchronized( sync ) { return q.first(); } }
+		public int last() { synchronized( sync ) { return q.last(); } }
+		public boolean isEmpty() { synchronized( sync ) { return q.isEmpty(); } }
+		public int size() { synchronized( sync ) { return q.size(); } }
+		public void clear() { synchronized( sync ) { q.clear(); } }
+		public void changed() { synchronized( sync ) { q.changed(); } }
+		public void allChanged() { synchronized( sync ) { q.allChanged(); } }
+		public void changed( int i ) { synchronized( sync ) { q.changed( i ); } }
+		public boolean remove( int i ) { synchronized( sync ) { return q.remove( i ); } }
+		public Comparator<? super K> comparator() { synchronized( sync ) { return q.comparator(); } }
+		public int front( int[] a ) { return q.front( a ); }
+	}
+
+
+	/** Returns a synchronized type-specific indirect priority queue backed by the specified type-specific indirect priority queue.
+	 *
+	 * @param q the indirect priority queue to be wrapped in a synchronized indirect priority queue.
+	 * @return a synchronized view of the specified indirect priority queue.
+	 */
+	public static <K> IndirectPriorityQueue<K> synchronize( final IndirectPriorityQueue<K> q ) {	return new SynchronizedIndirectPriorityQueue<K>( q ); }
+
+	/** Returns a synchronized type-specific indirect priority queue backed by the specified type-specific indirect priority queue, using an assigned object to synchronize.
+	 *
+	 * @param q the indirect priority queue to be wrapped in a synchronized indirect priority queue.
+	 * @param sync an object that will be used to synchronize the access to the indirect priority queue.
+	 * @return a synchronized view of the specified indirect priority queue.
+	 */
+
+	public static <K> IndirectPriorityQueue<K> synchronize( final IndirectPriorityQueue<K> q, final Object sync ) { return new SynchronizedIndirectPriorityQueue<K>( q, sync ); }
+
+}
diff --git a/src/it/unimi/dsi/fastutil/Maps.java b/src/it/unimi/dsi/fastutil/Maps.java
new file mode 100644
index 0000000..ee72bcd
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/Maps.java
@@ -0,0 +1,36 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2003-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+/** A class providing static methods and objects that do useful things with maps.
+ *
+ * @see java.util.Collections
+ */
+
+public class Maps {
+
+	private Maps() {}
+
+	/** A standard default return value to be used in maps containing <code>null</code> values.
+	 * @deprecated Since fastutil 5.0, the introduction of generics
+	 * makes this object pretty useless.
+	 */
+
+	@Deprecated
+	public static final Object MISSING = new Object();
+}
diff --git a/src/it/unimi/dsi/fastutil/PriorityQueue.java b/src/it/unimi/dsi/fastutil/PriorityQueue.java
new file mode 100644
index 0000000..f3ca07f
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/PriorityQueue.java
@@ -0,0 +1,102 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+import java.util.Comparator;
+import java.util.NoSuchElementException;
+
+/** A priority queue.
+ *
+ * <P>A priority queue provides a way to {@linkplain #enqueue(Object) enqueue}
+ * elements, and to {@linkplain #dequeue() dequeue} them in some specified
+ * order. Elements that are <em>smaller</em> in the specified order are
+ * dequeued first.  It is also possible to get the {@linkplain #first() first
+ * element}, that is, the element that would be dequeued next.
+ *
+ * <P>Additionally, the queue may provide a method to peek at the
+ * element that would be dequeued {@linkplain #last() last}.
+ *
+ * <P>The relative order of the elements enqueued should not change during
+ * queue operations. Nonetheless, some implementations may give the caller a
+ * way to notify the queue that the {@linkplain #changed() first element has
+ * changed its relative position in the order}.
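+ *
+ * <P>A minimal usage sketch (any concrete implementation will do; the type-specific
+ * implementations are provided elsewhere in <code>fastutil</code>):
+ * <pre>
+ * // queue is some PriorityQueue&lt;String&gt; implementation using the natural ordering
+ * queue.enqueue( "banana" );
+ * queue.enqueue( "apple" );
+ * String first = queue.dequeue(); // "apple"
+ * </pre>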
+ */
+
+public interface PriorityQueue<K> {
+
+	/** Enqueues a new element.
+	 *
+	 * @param x the element to enqueue.
+	 */
+
+	void enqueue( K x );
+
+	/** Dequeues the {@linkplain #first() first} element from the queue.
+	 *
+	 * @return the dequeued element.
+	 * @throws NoSuchElementException if the queue is empty.
+	 */
+
+	K dequeue();
+
+	/** Checks whether the queue is empty.
+	 *
+	 * @return true if the queue is empty.
+	 */
+
+	boolean isEmpty();
+
+	/** Returns the number of elements in this queue.
+	 *
+	 * @return the number of elements in this queue.
+	 */
+
+	int size();
+
+	/** Removes all elements from this queue.
+	 */
+
+	void clear();
+
+	/** Returns the first element of the queue.
+	 *
+	 * @return the first element.
+	 * @throws NoSuchElementException if the queue is empty.
+	 */
+
+	K first();
+
+	/** Returns the last element of the queue, that is, the element that would be dequeued last (optional operation).
+	 *
+	 * @return the last element.
+	 * @throws NoSuchElementException if the queue is empty.
+	 */
+
+	K last();
+
+	/** Notifies the queue that the {@linkplain #first() first} element has changed (optional operation).
+	 */
+
+	void changed();
+
+	/** Returns the comparator associated with this queue, or <code>null</code> if it uses its elements' natural ordering.
+	 *
+	 * @return the comparator associated with this queue, or <code>null</code> if it uses its elements' natural ordering.
+	 */
+	Comparator<? super K> comparator();
+}
diff --git a/src/it/unimi/dsi/fastutil/PriorityQueues.java b/src/it/unimi/dsi/fastutil/PriorityQueues.java
new file mode 100644
index 0000000..71ede9b
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/PriorityQueues.java
@@ -0,0 +1,109 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2003-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+import java.util.Comparator;
+import java.util.NoSuchElementException;
+
+import it.unimi.dsi.fastutil.PriorityQueue;
+
+/** A class providing static methods and objects that do useful things with priority queues.
+ *
+ * @see it.unimi.dsi.fastutil.PriorityQueue
+ */
+
+public class PriorityQueues {
+
+	private PriorityQueues() {}
+
+	/** An immutable class representing the empty priority queue.
+	 *
+	 * <P>This class may be useful to implement your own in case you subclass
+	 * {@link PriorityQueue}.
+	 */
+
+	@SuppressWarnings("rawtypes")
+	public static class EmptyPriorityQueue extends AbstractPriorityQueue {
+
+		protected EmptyPriorityQueue() {}
+
+		public void enqueue( Object o ) { throw new UnsupportedOperationException(); }
+		public Object dequeue() { throw new NoSuchElementException(); }
+		public boolean isEmpty() { return true; }
+		public int size() { return 0; }
+		public void clear() {}
+		public Object first() { throw new NoSuchElementException(); }
+		public Object last() { throw new NoSuchElementException(); }
+		public void changed() { throw new NoSuchElementException(); }
+		public Comparator<?> comparator() { return null; }
+		
+	}
+
+	/** An empty priority queue (immutable).
+	 */
+
+	public final static EmptyPriorityQueue EMPTY_QUEUE = new EmptyPriorityQueue();
+
+
+	/** A synchronized wrapper class for priority queues. */
+
+	public static class SynchronizedPriorityQueue<K> implements PriorityQueue<K> {
+		
+		public static final long serialVersionUID = -7046029254386353129L;
+
+		final protected PriorityQueue <K> q;
+		final protected Object sync;
+
+		protected SynchronizedPriorityQueue( final PriorityQueue <K> q, final Object sync ) {
+			this.q = q;
+			this.sync = sync;
+		}
+
+		protected SynchronizedPriorityQueue( final PriorityQueue <K> q ) {
+			this.q = q;
+			this.sync = this;
+		}
+
+		public void enqueue( K x ) { synchronized( sync ) { q.enqueue( x ); } }
+		public K dequeue() { synchronized( sync ) { return q.dequeue(); } }
+		public K first() { synchronized( sync ) { return q.first(); } }
+		public K last() { synchronized( sync ) { return q.last(); } }
+		public boolean isEmpty() { synchronized( sync ) { return q.isEmpty(); } }
+		public int size() { synchronized( sync ) { return q.size(); } }
+		public void clear() { synchronized( sync ) { q.clear(); } }
+		public void changed() { synchronized( sync ) { q.changed(); } }
+		public Comparator <? super K> comparator() { synchronized( sync ) { return q.comparator(); } }
+	}
+
+
+	/** Returns a synchronized priority queue backed by the specified priority queue.
+	 *
+	 * @param q the priority queue to be wrapped in a synchronized priority queue.
+	 * @return a synchronized view of the specified priority queue.
+	 */
+	public static <K> PriorityQueue <K> synchronize( final PriorityQueue <K> q ) {	return new SynchronizedPriorityQueue<K>( q ); }
+
+	/** Returns a synchronized priority queue backed by the specified priority queue, using an assigned object to synchronize.
+	 *
+	 * @param q the priority queue to be wrapped in a synchronized priority queue.
+	 * @param sync an object that will be used to synchronize the access to the priority queue.
+	 * @return a synchronized view of the specified priority queue.
+	 */
+
+	public static <K> PriorityQueue <K> synchronize( final PriorityQueue <K> q, final Object sync ) { return new SynchronizedPriorityQueue<K>( q, sync ); }
+}
diff --git a/src/it/unimi/dsi/fastutil/Size64.java b/src/it/unimi/dsi/fastutil/Size64.java
new file mode 100644
index 0000000..8cea41e
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/Size64.java
@@ -0,0 +1,50 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2010-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+import java.util.Collection;
+
+/** An interface for data structures whose size can exceed {@link Integer#MAX_VALUE}.
+ *
+ * <P>The only methods specified by this interface are {@link #size64()}, and 
+ * a deprecated {@link #size()} identical to {@link Collection#size()}. Implementations
+ * can work around the type problem of {@link java.util.Collection#size()}
+ * (e.g., not being able to return more than {@link Integer#MAX_VALUE}) by implementing this
+ * interface. Callers interested in large structures
+ * can use <code>instanceof</code> to check for the presence of {@link #size64()}.
+ * 
+ * <p>We remark that it is always a good idea to implement both {@link #size()} <em>and</em> {@link #size64()},
+ * as the former might be implemented by a superclass in an incompatible way. If you implement this interface,
+ * just implement {@link #size()} as a <em>deprecated</em> method returning <code>Math.min(Integer.MAX_VALUE, size64())</code>.
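+ *
+ * <p>A minimal sketch of the recommended pattern (class and field names are illustrative):
+ * <pre>
+ * public class HugeStructure implements Size64 {
+ *     private long count;
+ *     public long size64() { return count; }
+ *     // deprecated in favour of size64()
+ *     public int size() { return (int)Math.min( Integer.MAX_VALUE, size64() ); }
+ * }
+ * </pre>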
+ */
+
+public interface Size64 {
+	/** Returns the size of this data structure as a long.
+	 *
+	 * @return  the size of this data structure.
+	 */
+	long size64();
+
+	/** Returns the size of this data structure, minimized with {@link Integer#MAX_VALUE}.
+	 * 
+	 * @return the size of this data structure, minimized with {@link Integer#MAX_VALUE}.
+	 * @see java.util.Collection#size()
+	 * @deprecated Use {@link #size64()} instead.
+	 */
+	@Deprecated
+	int size();
+}
diff --git a/src/it/unimi/dsi/fastutil/Stack.java b/src/it/unimi/dsi/fastutil/Stack.java
new file mode 100644
index 0000000..5c77753
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/Stack.java
@@ -0,0 +1,72 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2002-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+
+import java.util.NoSuchElementException;
+
+/** A stack.
+ *
+ * <P>A stack must provide the classical {@link #push(Object)} and 
+ * {@link #pop()} operations, but may also be <em>peekable</em>
+ * to some extent: it may provide just the {@link #top()} function,
+ * or even a more powerful {@link #peek(int)} method that provides
+ * access to all elements on the stack (indexed from the top, which
+ * has index 0).
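+ *
+ * <P>A short usage sketch (assuming some concrete implementation):
+ * <pre>
+ * // stack is some Stack&lt;String&gt; implementation
+ * stack.push( "first" );
+ * stack.push( "second" );
+ * String top = stack.top();    // "second" (if peeking is supported)
+ * String popped = stack.pop(); // "second"
+ * </pre>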
+ */
+
+public interface Stack<K> {
+
+	/** Pushes the given object on the stack.
+	 *
+	 * @param o the object that will become the new top of the stack.
+	 */
+
+	void push( K o );
+
+	/** Pops the top off the stack.
+	 *
+	 * @return the top of the stack.
+	 * @throws NoSuchElementException if the stack is empty.
+	 */
+
+	K pop();
+
+	/** Checks whether the stack is empty.
+	 *
+	 * @return true if the stack is empty.
+	 */
+
+	boolean isEmpty();
+
+	/** Peeks at the top of the stack (optional operation).
+	 *
+	 * @return the top of the stack.
+	 * @throws NoSuchElementException if the stack is empty.
+	 */
+
+	K top();
+
+	/** Peeks at an element on the stack (optional operation).
+	 *
+	 * @return the <code>i</code>-th element on the stack; 0 represents the top.
+	 * @throws IndexOutOfBoundsException if the designated element does not exist.
+	 */
+
+	K peek( int i );
+
+}
diff --git a/src/it/unimi/dsi/fastutil/Swapper.java b/src/it/unimi/dsi/fastutil/Swapper.java
new file mode 100644
index 0000000..5e8c9c6
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/Swapper.java
@@ -0,0 +1,31 @@
+package it.unimi.dsi.fastutil;
+
+/*		 
+ * Copyright (C) 2010-2013 Sebastiano Vigna 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */
+
+/** An object that can swap elements whose position is specified by integers.
+ *
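+ * <p>For example (an illustrative sketch), to sort two parallel arrays <code>key</code> and
+ * <code>value</code> by <code>key</code>, a swapper must exchange the entries of both arrays
+ * at the given positions:
+ * <pre>
+ * final int[] key = { 3, 1, 2 }, value = { 30, 10, 20 };
+ * Swapper swapper = new Swapper() {
+ *     public void swap( final int a, final int b ) {
+ *         int t = key[ a ]; key[ a ] = key[ b ]; key[ b ] = t;
+ *         t = value[ a ]; value[ a ] = value[ b ]; value[ b ] = t;
+ *     }
+ * };
+ * </pre>
+ *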
+ * @see Arrays#quickSort(int, int, it.unimi.dsi.fastutil.ints.IntComparator, Swapper)
+ */
+
+public interface Swapper {
+	/** Swaps the data at the given positions.
+	 * 
+	 * @param a the first position to swap.
+	 * @param b the second position to swap.
+	 */
+	void swap( int a, int b );
+}
diff --git a/src/it/unimi/dsi/fastutil/booleans/package.html b/src/it/unimi/dsi/fastutil/booleans/package.html
new file mode 100644
index 0000000..fc7c8fb
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/booleans/package.html
@@ -0,0 +1,16 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+  <head>
+    <title>fastutil</title>
+  </head>
+
+  <body>
+
+	 <P>Provides type-specific classes for boolean elements or keys.
+
+      	<P>Not all classes are provided in a boolean-specific version: sorted
+      sets and maps are not generated (as they would be completely
+      useless). Unsorted sets and maps are kept for orthogonality, whereas
+      {@link it.unimi.dsi.fastutil.booleans.BooleanCollection} is used by maps with boolean values.
+  </body>
+</html>
diff --git a/src/it/unimi/dsi/fastutil/bytes/package.html b/src/it/unimi/dsi/fastutil/bytes/package.html
new file mode 100644
index 0000000..b165b70
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/bytes/package.html
@@ -0,0 +1,12 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+  <head>
+    <title>fastutil</title>
+  </head>
+
+  <body>
+
+	 <P>Provides type-specific classes for byte elements or keys.
+
+  </body>
+</html>
diff --git a/src/it/unimi/dsi/fastutil/chars/package.html b/src/it/unimi/dsi/fastutil/chars/package.html
new file mode 100644
index 0000000..5aaa012
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/chars/package.html
@@ -0,0 +1,12 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+  <head>
+    <title>fastutil</title>
+  </head>
+
+  <body>
+
+	 <P>Provides type-specific classes for character elements or keys.
+
+  </body>
+</html>
diff --git a/src/it/unimi/dsi/fastutil/doubles/package.html b/src/it/unimi/dsi/fastutil/doubles/package.html
new file mode 100644
index 0000000..9d30c8b
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/doubles/package.html
@@ -0,0 +1,12 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+  <head>
+    <title>fastutil</title>
+  </head>
+
+  <body>
+
+	 <P>Provides type-specific classes for double elements or keys.
+
+  </body>
+</html>
diff --git a/src/it/unimi/dsi/fastutil/floats/package.html b/src/it/unimi/dsi/fastutil/floats/package.html
new file mode 100644
index 0000000..f60969c
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/floats/package.html
@@ -0,0 +1,12 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+  <head>
+    <title>fastutil</title>
+  </head>
+
+  <body>
+
+	 <P>Provides type-specific classes for float elements or keys.
+
+  </body>
+</html>
diff --git a/src/it/unimi/dsi/fastutil/ints/package.html b/src/it/unimi/dsi/fastutil/ints/package.html
new file mode 100644
index 0000000..7fe95f1
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/ints/package.html
@@ -0,0 +1,12 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+  <head>
+    <title>fastutil</title>
+  </head>
+
+  <body>
+
+	 <P>Provides type-specific classes for integer elements or keys.
+
+  </body>
+</html>
diff --git a/src/it/unimi/dsi/fastutil/io/FastBufferedInputStream.java b/src/it/unimi/dsi/fastutil/io/FastBufferedInputStream.java
new file mode 100644
index 0000000..7ab3dab
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/io/FastBufferedInputStream.java
@@ -0,0 +1,551 @@
+package it.unimi.dsi.fastutil.io;
+
+/*		 
+ * fastutil: Fast & compact type-specific collections for Java
+ *
+ * Copyright (C) 2005-2011 Sebastiano Vigna 
+ *
+ *  This library is free software; you can redistribute it and/or
+ *  modify it under the terms of the GNU Lesser General Public
+ *  License as published by the Free Software Foundation; either
+ *  version 2.1 of the License, or (at your option) any later version.
+ *
+ *  This library is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ *  Lesser General Public License for more details.
+ *
+ *  You should have received a copy of the GNU Lesser General Public
+ *  License along with this library; if not, write to the Free Software
+ *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+ *
+ */
+
+import it.unimi.dsi.fastutil.bytes.ByteArrays;
+import it.unimi.dsi.fastutil.io.RepositionableStream;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.channels.FileChannel;
+import java.util.EnumSet;
+
+/** Lightweight, unsynchronized, aligned input stream buffering class with
+ *  {@linkplain #skip(long) true skipping},
+ *  {@linkplain MeasurableStream measurability}, 
+ *  {@linkplain RepositionableStream repositionability} 
+ *  and {@linkplain #readLine(byte[], int, int, EnumSet) line reading} support.
+ *
+ * <P>This class provides buffering for input streams, but it does so with 
+ * purposes and an internal logic that are radically different from the ones
+ * adopted in {@link java.io.BufferedInputStream}. The main features follow.
+ * 
+ * <ul>
+ * <li><P>There is no support for marking. All methods are unsynchronized.
+ * 
+ * <li><P>As an additional feature, this class implements the {@link
+ * RepositionableStream} and {@link MeasurableStream} interfaces.  
+ * An instance of this class will try to cast
+ * the underlying byte stream to a {@link RepositionableStream} and to fetch by
+ * reflection the {@link java.nio.channels.FileChannel} underlying the given
+ * input stream, in this order. If either reference can be successfully
+ * fetched, you can use {@link #position(long)} to reposition the stream.
+ * Much in the same way, an instance of this class will try to cast
+ * the underlying byte stream to a {@link MeasurableStream}, and if this
+ * operation is successful, or if a {@link java.nio.channels.FileChannel} can
+ * be detected, then {@link #position()} and {@link #length()} will work as expected.
+ * 
+ * 
+ * <li><p>Due to erratic and unpredictable behaviour of {@link InputStream#skip(long)},
+ * which does not correspond to its specification and which Sun refuses to fix
+ * (see <a href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6222822">bug 6222822</a>;
+ * don't be fooled by the “closed, fixed” label),
+ * this class peeks at the underlying stream and if it is {@link System#in} it uses
+ * repeated reads instead of calling {@link InputStream#skip(long)} on the underlying stream; moreover,
+ * skips and reads are tried alternately, so as to guarantee that skipping
+ * fewer bytes than requested can be caused only by reaching the end of file.
+ *
+ * <li><p>This class also keeps track of the number of bytes read so far, so
+ * as to be able to implement {@link MeasurableStream#position()}
+ * independently of the underlying input stream.
+ * 
+ * <li><p>This class has limited support for 
+ * {@linkplain #readLine(byte[], int, int, EnumSet) “reading a line”}
+ * (whatever that means) from the underlying input stream. You can choose the set of
+ * {@linkplain FastBufferedInputStream.LineTerminator line terminators} that
+ * delimit lines.
+ *
+ * </ul>
+ * 
+ * <p><strong>Warning:</strong> Since <code>fastutil</code> 6.0.0, this class detects
+ * implementations of {@link MeasurableStream} instead of subclasses of <code>MeasurableInputStream</code> (which is deprecated).
+ * 
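+ * <p>A short illustrative usage (the file name is hypothetical):
+ * <pre>
+ * FastBufferedInputStream fbis =
+ *     new FastBufferedInputStream( new FileInputStream( "data.bin" ) );
+ * fbis.position( 1024 ); // works because FileInputStream exposes a FileChannel via getChannel()
+ * int b = fbis.read();
+ * </pre>
+ * 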
+ * @since 4.4
+ */
+
+public class FastBufferedInputStream extends MeasurableInputStream implements RepositionableStream {
+
+	/** The default size of the internal buffer in bytes (8Ki). */
+	public final static int DEFAULT_BUFFER_SIZE = 8 * 1024;
+
+	/** An enumeration of the supported line terminators. */
+	public static enum LineTerminator {
+		/** A carriage return (CR, ASCII 13). */
+		CR,
+		/** A line feed (LF, ASCII 10). */
+		LF,
+		/** A carriage return followed by a line feed (CR/LF, ASCII 13/10). */
+		CR_LF
+	}
+
+	/** A set containing <em>all available</em> line terminators. */
+	public final static EnumSet<LineTerminator> ALL_TERMINATORS = EnumSet.allOf( LineTerminator.class );
+	
+	/** The underlying input stream. */
+	protected InputStream is;
+
+	/** The internal buffer. */
+	protected byte buffer[];
+
+	/** The current position in the buffer. */
+	protected int pos;
+
+	/** The number of bytes ever read (reset upon a call to {@link #position(long)}).
+	 * In particular, this will always represent the index (in the underlying input stream)
+	 * of the first available byte in the buffer. */
+	protected long readBytes;
+
+	/** The number of buffer bytes available starting from {@link #pos}. */
+	protected int avail;
+
+	/** The cached file channel underlying {@link #is}, if any. */
+	private FileChannel fileChannel;
+
+	/** {@link #is} cast to a positionable stream, if possible. */
+	private RepositionableStream repositionableStream;
+
+	/** {@link #is} cast to a measurable stream, if possible. */
+	private MeasurableStream measurableStream;
+
+	private static int ensureBufferSize( final int bufferSize ) {
+		if ( bufferSize <= 0 ) throw new IllegalArgumentException( "Illegal buffer size: " + bufferSize );
+		return bufferSize;
+	}
+		
+	/** Creates a new fast buffered input stream by wrapping a given input stream with a given buffer. 
+	 *
+	 * @param is an input stream to wrap.
+	 * @param buffer a buffer of positive length.
+	 */
+	public FastBufferedInputStream( final InputStream is, final byte[] buffer ) {
+		this.is = is;
+		ensureBufferSize( buffer.length );
+		this.buffer = buffer;
+
+		if ( is instanceof RepositionableStream ) repositionableStream = (RepositionableStream)is;
+		if ( is instanceof MeasurableStream ) measurableStream = (MeasurableStream)is;
+			
+		if ( repositionableStream == null ) {
+				
+			try {
+				fileChannel = (FileChannel)( is.getClass().getMethod( "getChannel", new Class[] {} ) ).invoke( is, new Object[] {} );
+			}
+			catch( IllegalAccessException e ) {}
+			catch( IllegalArgumentException e ) {}
+			catch( NoSuchMethodException e ) {}
+			catch( java.lang.reflect.InvocationTargetException e ) {}
+			catch( ClassCastException e ) {}
+		}
+	}
+
+	/** Creates a new fast buffered input stream by wrapping a given input stream with a given buffer size. 
+	 *
+	 * @param is an input stream to wrap.
+	 * @param bufferSize the size in bytes of the internal buffer (greater than zero).
+	 */
+	public FastBufferedInputStream( final InputStream is, final int bufferSize ) {
+		this( is, new byte[ ensureBufferSize( bufferSize ) ] );		
+	}
+	
+	/** Creates a new fast buffered input stream by wrapping a given input stream with a buffer of {@link #DEFAULT_BUFFER_SIZE} bytes. 
+	 *
+	 * @param is an input stream to wrap.
+	 */
+	public FastBufferedInputStream( final InputStream is ) {
+		this( is, DEFAULT_BUFFER_SIZE );
+	}
+
+	/** Checks whether no more bytes will be returned.
+	 * 
+	 * <p>This method will refill the internal buffer.
+	 * 
+	 * @return true if there are no bytes in the internal buffer and
+	 * the underlying input stream is exhausted.
+	 */
+	
+	protected boolean noMoreCharacters() throws IOException {
+		if ( avail == 0 ) {
+			avail = is.read( buffer );
+			if ( avail <= 0 ) {
+				avail = 0;
+				return true;
+			}
+			pos = 0;
+		}
+		return false;
+	}
+	
+
+	
+	public int read() throws IOException {
+		if ( noMoreCharacters() ) return -1;
+		avail--;
+		readBytes++;
+		return buffer[ pos++ ] & 0xFF;
+	}
+
+
+	public int read( final byte b[], final int offset, final int length ) throws IOException {
+		if ( length <= avail ) {
+			System.arraycopy( buffer, pos, b, offset, length );
+			pos += length;
+			avail -= length;
+			readBytes += length;
+			return length;
+		}
+	
+		final int head = avail;
+		
+		System.arraycopy( buffer, pos, b, offset, head );
+		pos = avail = 0;
+		readBytes += head;
+		
+		if ( length > buffer.length ) {
+			// We read directly into the destination
+			final int result = is.read( b, offset + head, length - head );
+			if ( result > 0 ) readBytes += result;
+			return result < 0 ? ( head == 0 ? -1 : head ) : result + head;
+		}
+		
+		if ( noMoreCharacters() ) return head == 0 ? -1 : head;
+		
+		final int toRead = Math.min( length - head, avail );
+		readBytes += toRead;
+		System.arraycopy( buffer, 0, b, offset + head, toRead );
+		pos = toRead;
+		avail -= toRead;
+		
+		// Note that head >= 0, and necessarily toRead > 0
+		return toRead + head;
+	}
+
+	/** Reads a line into the given byte array using {@linkplain #ALL_TERMINATORS all terminators}.
+	 *
+	 * @param array byte array where the next line will be stored.
+	 * @return the number of bytes actually placed in <code>array</code>, or -1 at end of file.
+	 * @see #readLine(byte[], int, int, EnumSet)
+	 */
+
+	public int readLine( final byte[] array ) throws IOException {
+		return readLine( array, 0, array.length, ALL_TERMINATORS );
+	}
+
+	/** Reads a line into the given byte array.
+	 *
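+	 * <p>For instance (illustrative), <code>readLine( array, EnumSet.of( LineTerminator.LF ) )</code>
+	 * stops only at line feeds; carriage returns are treated as ordinary bytes and copied into the array.
+	 *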
+	 * @param array byte array where the next line will be stored.
+	 * @param terminators a set containing the line termination sequences that we want
+	 * to consider as valid.
+	 * @return the number of bytes actually placed in <code>array</code>, or -1 at end of file.
+	 * @see #readLine(byte[], int, int, EnumSet)
+	 */
+
+	public int readLine( final byte[] array, final EnumSet<LineTerminator> terminators ) throws IOException {
+		return readLine( array, 0, array.length, terminators );
+	}
+
+	/** Reads a line into the given byte-array fragment	using {@linkplain #ALL_TERMINATORS all terminators}.
+	 *
+	 * @param array byte array where the next line will be stored.
+	 * @param off the first byte to use in <code>array</code>.
+	 * @param len the maximum number of bytes to read.
+	 * @return the number of bytes actually placed in <code>array</code>, or -1 at end of file.
+	 * @see #readLine(byte[], int, int, EnumSet)
+	 */
+	public int readLine( final byte[] array, final int off, final int len ) throws IOException {
+		return readLine( array, off, len, ALL_TERMINATORS );
+	}
+
+	/** Reads a line into the given byte-array fragment.
+	 *
+	 * <P>Reading lines (i.e., characters) out of a byte stream is not always sensible
+	 * (methods available to that purpose in old versions of Java have been mercilessly deprecated).
+	 * Nonetheless, in several situations, such as when decoding network protocols or headers
+	 * known to be ASCII, it is very useful to be able to read a line from a byte stream.
+	 * 
+	 * <p>This method will attempt to read the next line into <code>array</code> starting at <code>off</code>,
+	 * reading at most <code>len</code> bytes. The read, however, will be stopped by the end of file or
+	 * when meeting a {@linkplain LineTerminator <em>line terminator</em>}. Of course, for this operation
+	 * to be sensible the encoding of the text contained in the stream, if any, must not generate spurious
+	 * carriage returns or line feeds. Note that the termination detection uses a maximisation
+	 * criterion, so if you specify both {@link LineTerminator#CR} and
+	 * {@link LineTerminator#CR_LF} meeting a pair CR/LF will consider the whole pair a terminator.
+	 * 
+	 * <p>Terminators are <em>not</em> copied into <em>array</em> or included in the returned count. The
+	 * returned integer can be used to check whether the line is complete: if it is smaller than
+	 * <code>len</code>, then more bytes might be available, but note that this method (contrarily
+	 * to {@link #read(byte[], int, int)}) can legitimately return zero when <code>len</code>
+	 * is nonzero just because a terminator was found as the first character. Thus, the intended
+	 * usage of this method is to call it on a given array, check whether <code>len</code> bytes
+	 * have been read, and if so try again (possibly extending the array) until a number of read bytes
+	 * strictly smaller than <code>len</code> (possibly, -1) is returned.
+	 * 
+	 * <p>If you need to guarantee that a full line is read, use the following idiom:
+	 * <pre>
+	 * int start = off, len;
+	 * while( ( len = fbis.readLine( array, start, array.length - start, terminators ) ) == array.length - start ) {
+	 *     start += len;
+	 *     array = ByteArrays.grow( array, array.length + 1 );
+	 * }
+	 * </pre>
+	 *
+	 * <p>At the end of the loop, the line will be placed in <code>array</code> starting at
+	 * <code>off</code> (inclusive) and ending at <code>start + Math.max( len, 0 )</code> (exclusive).
+	 *
+	 * @param array byte array where the next line will be stored.
+	 * @param off the first byte to use in <code>array</code>.
+	 * @param len the maximum number of bytes to read.
+	 * @param terminators a set containing the line termination sequences that we want
+	 * to consider as valid.
+	 * @return the number of bytes actually placed in <code>array</code>, or -1 at end of file.
+	 * Note that the returned number will be <code>len</code> if no line termination sequence 
+	 * specified in <code>terminators</code> has been met before scanning <code>len</code> bytes,
+	 * and the end of file has not been met. 
+	 */
+
+	public int readLine( final byte[] array, final int off, final int len, final EnumSet<LineTerminator> terminators ) throws IOException {
+		ByteArrays.ensureOffsetLength( array, off, len );
+		if ( len == 0 ) return 0; // 0-length reads always return 0
+		if ( noMoreCharacters() ) return -1;
+		int i, k = 0, remaining = len, read = 0; // The number of bytes still to be read
+		for(;;) {
+			for( i = 0; i < avail && i < remaining && ( k = buffer[ pos + i ] ) != '\n' && k != '\r' ; i++ );
+			System.arraycopy( buffer, pos, array, off + read, i );
+			pos += i; 
+			avail -= i;
+			read += i;
+			remaining -= i;
+			if ( remaining == 0 ) {
+				readBytes += read;
+				return read; // We did not stop because of a terminator
+			}
+			
+			if ( avail > 0 ) { // We met a terminator
+				if ( k == '\n' ) { // LF first
+					pos++;
+					avail--;
+					if ( terminators.contains( LineTerminator.LF ) ) {
+						readBytes += read + 1;
+						return read;
+					}
+					else {
+						array[ off + read++ ] = '\n';
+						remaining--;
+					}
+				}
+				else if ( k == '\r' ) { // CR first
+					pos++;
+					avail--;
+					
+					if ( terminators.contains( LineTerminator.CR_LF ) ) {
+						if ( avail > 0 ) {
+							if ( buffer[ pos ] == '\n' ) { // CR/LF with LF already in the buffer.
+								pos ++;
+								avail--;
+								readBytes += read + 2;
+								return read;
+							}
+						}
+						else { // We must search for the LF.
+							if ( noMoreCharacters() ) {
+								// Not found a matching LF because of end of file, will return CR in buffer if not a terminator
+
+								if ( ! terminators.contains( LineTerminator.CR ) ) {
+									array[ off + read++ ] = '\r';
+									remaining--;
+									readBytes += read;
+								}
+								else readBytes += read + 1;
+								
+								return read;
+							}
+							if ( buffer[ 0 ] == '\n' ) {
+								// Found matching LF, won't return terminators in the buffer
+								pos++;
+								avail--;
+								readBytes += read + 2;
+								return read;
+							}
+						}
+					}
+					
+					if ( terminators.contains( LineTerminator.CR ) ) {
+						readBytes += read + 1;
+						return read;
+					}
+					
+					array[ off + read++ ] = '\r';
+					remaining--;
+				}
+			}
+			else if ( noMoreCharacters() ) {
+				readBytes += read;
+				return read;
+			}
+		}
+	}
+
+	
+
+	public void position( long newPosition ) throws IOException {
+
+		final long position = readBytes;
+
+		/** Note that this check will succeed also in the case of
+		 * an empty buffer and position == newPosition. This behaviour is
+		 * intentional, as it delays buffering to when it is actually
+		 * necessary and avoids useless calls to the underlying stream. */
+		
+		if ( newPosition <= position + avail && newPosition >= position - pos ) {
+			pos += newPosition - position;
+			avail -= newPosition - position;
+			readBytes = newPosition;
+			return;
+		}
+
+		if ( repositionableStream != null ) repositionableStream.position( newPosition  );
+		else if ( fileChannel != null ) fileChannel.position( newPosition );
+		else throw new UnsupportedOperationException( "position() can only be called if the underlying byte stream implements the RepositionableStream interface or if the getChannel() method of the underlying byte stream exists and returns a FileChannel" );
+		readBytes = newPosition;
+
+		avail = pos = 0;
+	}
+
+	public long position() throws IOException {
+		return readBytes;
+	}
+
+	/** Returns the length of the underlying input stream, if it is {@linkplain MeasurableStream measurable}.
+	 *
+	 * @return the length of the underlying input stream.
+	 * @throws UnsupportedOperationException if the underlying input stream is not {@linkplain MeasurableStream measurable} and
+	 * cannot provide a {@link FileChannel}.
+	 */
+
+	public long length() throws IOException {
+		if ( measurableStream != null ) return measurableStream.length();
+		if ( fileChannel != null ) return fileChannel.size();
+		throw new UnsupportedOperationException();
+	}
+
+
+	/** Skips the given amount of bytes by repeated reads.
+	 *
+	 * <strong>Warning</strong>: this method uses the internal buffer destructively.
+	 *
+	 * @param n the number of bytes to skip.
+	 * @return the number of bytes actually skipped.
+	 * @see InputStream#skip(long)
+	 */
+
+	private long skipByReading( final long n ) throws IOException {
+		long toSkip = n;
+		int len;
+		while( toSkip > 0 ) {
+			len = is.read( buffer, 0, (int)Math.min( buffer.length, toSkip ) );
+			if ( len > 0 ) toSkip -= len;
+			else break;
+		}
+
+		return n - toSkip;
+	}
+
+
+	/** Skips over and discards the given number of bytes of data from this fast buffered input stream. 
+	 *
+	 * <p>As explained in the {@linkplain FastBufferedInputStream class documentation}, the semantics
+	 * of {@link InputStream#skip(long)} is fatally flawed. This method provides additional semantics as follows:
+	 * it will skip the provided number of bytes, unless the end of file has been reached.
+	 *
+	 * <p>Additionally, if the underlying input stream is {@link System#in} this method will use
+	 * repeated reads instead of invoking {@link InputStream#skip(long)}.
+	 *
+	 * @param n the number of bytes to skip.
+	 * @return the number of bytes actually skipped; it can be smaller than <code>n</code>
+	 * only if the end of file has been reached.
+	 * @see InputStream#skip(long)
+	 */
+
+	public long skip( final long n ) throws IOException {
+		if ( n <= avail ) {
+			final int m = (int)n;
+			pos += m;
+			avail -= m;
+			readBytes += n;
+			return n;
+		}
+
+		long toSkip = n - avail, result = 0;
+		avail = 0;
+
+		while ( toSkip != 0 && ( result = is == System.in ? skipByReading( toSkip ) : is.skip( toSkip ) ) < toSkip ) {
+			if ( result == 0 ) {
+				if ( is.read() == -1 ) break;
+				toSkip--;
+			}
+			else toSkip -= result;
+		}
+
+		final long t = n - ( toSkip - result );
+		readBytes += t;
+		return t;
+	}
+
+
+	public int available() throws IOException {
+		return (int)Math.min( is.available() + (long)avail, Integer.MAX_VALUE );
+	}
+
+	public void close() throws IOException {
+		if ( is == null ) return;
+		if ( is != System.in ) is.close();
+		is = null;
+		buffer = null;
+	}
+
+	/** Resets the internal logic of this fast buffered input stream, clearing the buffer. 
+	 *
+	 * <p>All buffering information is discarded, and the number of bytes read so far
+	 * (and thus, also the {@linkplain #position() current position})
+	 * is adjusted to reflect this fact. 
+	 *  
+	 * <p>This method is mainly useful for re-reading 
+	 * files that have been overwritten externally. 
+	 */
+
+	public void flush() {
+		if ( is == null ) return;
+		readBytes += avail; 
+		avail = pos = 0;
+	}
+
+	/** Resets the internal logic of this fast buffered input stream.
+	 * 
+	 * @deprecated As of <samp>fastutil</samp> 5.0.4, replaced by {@link #flush()}. The old
+	 * semantics of this method does not contradict {@link InputStream}'s contract, as
+	 * the semantics of {@link #reset()} is undefined if {@link InputStream#markSupported()}
+	 * returns false. On the other hand, the name was really a poor choice.
+	 */
+	@Deprecated
+	public void reset() {
+		flush();
+	}
+}
diff --git a/src/it/unimi/dsi/fastutil/io/FastBufferedOutputStream.java b/src/it/unimi/dsi/fastutil/io/FastBufferedOutputStream.java
new file mode 100644
index 0000000..77e09f5
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/io/FastBufferedOutputStream.java
@@ -0,0 +1,217 @@
+package it.unimi.dsi.fastutil.io;
+
+/*		 
+ * fastutil: Fast & compact type-specific collections for Java
+ *
+ * Copyright (C) 2005-2011 Sebastiano Vigna 
+ *
+ *  This library is free software; you can redistribute it and/or
+ *  modify it under the terms of the GNU Lesser General Public
+ *  License as published by the Free Software Foundation; either
+ *  version 2.1 of the License, or (at your option) any later version.
+ *
+ *  This library is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ *  Lesser General Public License for more details.
+ *
+ *  You should have received a copy of the GNU Lesser General Public
+ *  License along with this library; if not, write to the Free Software
+ *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+ *
+ */
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.channels.FileChannel;
+
+/** Lightweight, unsynchronized output stream buffering class with
+ *  {@linkplain MeasurableStream measurability} and
+ *  {@linkplain RepositionableStream repositionability}.
+ *
+ * <P>This class provides buffering for output streams, but it does so with 
+ * purposes and an internal logic that are radically different from the ones
+ * adopted in {@link java.io.BufferedOutputStream}. The main features follow.
+ * 
+ * <ul>
+ * <li><P>All methods are unsynchronized.
+ * 
+ * <li><P>As an additional feature, this class implements the {@link
+ * RepositionableStream} and {@link MeasurableStream} interfaces.  
+ * An instance of this class will try to cast
+ * the underlying byte stream to a {@link RepositionableStream} and to fetch by
+ * reflection the {@link java.nio.channels.FileChannel} underlying the given
+ * output stream, in this order. If either reference can be successfully
+ * fetched, you can use {@link #position(long)} to reposition the stream.
+ * Much in the same way, an instance of this class will try to cast the
+ * underlying byte stream to a {@link MeasurableStream}, and if this
+ * operation is successful, or if a {@link java.nio.channels.FileChannel} can
+ * be detected, then {@link #position()} and {@link #length()} will work as expected.
+ * </ul>
+ * @since 4.4
+ */
+
+public class FastBufferedOutputStream extends MeasurableOutputStream implements RepositionableStream {
+	private static final boolean ASSERTS = false;
+	
+	/** The default size of the internal buffer in bytes (8KiB). */
+	public final static int DEFAULT_BUFFER_SIZE = 8 * 1024;
+
+	/** The internal buffer. */
+	protected byte buffer[];
+
+	/** The current position in the buffer. */
+	protected int pos;
+
+	/** The number of buffer bytes available starting from {@link #pos} 
+	 * (it must always be equal to <code>buffer.length - pos</code>). */
+	protected int avail;
+
+	/** The underlying output stream. */
+	protected OutputStream os;
+
+	/** The cached file channel underlying {@link #os}, if any. */
+	private FileChannel fileChannel;
+
+	/** {@link #os} cast to a positionable stream, if possible. */
+	private RepositionableStream repositionableStream;
+
+	/** {@link #os} cast to a measurable stream, if possible. */
+	private MeasurableStream measurableStream;
+
+	private static int ensureBufferSize( final int bufferSize ) {
+		if ( bufferSize <= 0 ) throw new IllegalArgumentException( "Illegal buffer size: " + bufferSize );
+		return bufferSize;
+	}
+		
+	/** Creates a new fast buffered output stream by wrapping a given output stream with a given buffer. 
+	 *
+	 * @param os an output stream to wrap.
+	 * @param buffer a buffer of positive length.
+	 */
+	public FastBufferedOutputStream( final OutputStream os, final byte[] buffer ) {
+		this.os = os;
+		ensureBufferSize( buffer.length );
+		this.buffer = buffer;
+		avail = buffer.length;
+		
+		if ( os instanceof RepositionableStream ) repositionableStream = (RepositionableStream)os;
+		if ( os instanceof MeasurableStream ) measurableStream = (MeasurableStream)os;
+			
+		if ( repositionableStream == null ) {
+				
+			try {
+				fileChannel = (FileChannel)( os.getClass().getMethod( "getChannel", new Class[] {} ) ).invoke( os, new Object[] {} );
+			}
+			catch( IllegalAccessException e ) {}
+			catch( IllegalArgumentException e ) {}
+			catch( NoSuchMethodException e ) {}
+			catch( java.lang.reflect.InvocationTargetException e ) {}
+			catch( ClassCastException e ) {}
+		}
+
+	}
+
+	/** Creates a new fast buffered output stream by wrapping a given output stream with a given buffer size. 
+	 *
+	 * @param os an output stream to wrap.
+	 * @param bufferSize the size in bytes of the internal buffer.
+	 */
+	public FastBufferedOutputStream( final OutputStream os, final int bufferSize ) {
+		this( os, new byte[ ensureBufferSize( bufferSize ) ] );
+	}
+
+	/** Creates a new fast buffered output stream by wrapping a given output stream with a buffer of {@link #DEFAULT_BUFFER_SIZE} bytes. 
+	 *
+	 * @param os an output stream to wrap.
+	 */
+	public FastBufferedOutputStream( final OutputStream os ) {
+		this( os, DEFAULT_BUFFER_SIZE );
+	}
+
+	private void dumpBuffer( final boolean ifFull ) throws IOException {
+		if ( ! ifFull || avail == 0 ) {
+			os.write( buffer, 0, pos );
+			pos = 0;
+			avail = buffer.length;
+		}
+	}
+
+	public void write( final int b ) throws IOException {
+		if ( ASSERTS ) assert avail > 0;
+		avail--;
+		buffer[ pos++ ] = (byte)b;
+		dumpBuffer( true );
+	}
+
+
+	public void write( final byte b[], final int offset, final int length ) throws IOException {
+		if ( length >= buffer.length ) {
+			dumpBuffer( false );
+			os.write( b, offset, length );
+			return;
+		}
+		
+		if ( length <= avail ) {
+			// Copy in buffer
+			System.arraycopy( b, offset, buffer, pos, length );
+			pos += length;
+			avail -= length;
+			dumpBuffer( true );
+			return;
+		}
+		
+		dumpBuffer( false );
+		System.arraycopy( b, offset, buffer, 0, length );
+		pos = length;
+		avail -= length;
+	}
+
+	public void flush() throws IOException {
+		dumpBuffer( false );
+		os.flush();
+	}
+	
+	public void close() throws IOException {
+		if ( os == null ) return;
+		flush();
+		if ( os != System.out ) os.close();
+		os = null;
+		buffer = null;
+	}
+	
+	public long position() throws IOException {
+		if ( repositionableStream != null ) return repositionableStream.position() + pos;
+		else if ( measurableStream != null ) return measurableStream.position() + pos;
+		else if ( fileChannel != null ) return fileChannel.position() + pos;
+		else throw new UnsupportedOperationException( "position() can only be called if the underlying byte stream implements the MeasurableStream or RepositionableStream interface or if the getChannel() method of the underlying byte stream exists and returns a FileChannel" );
+	}
+
+	/** Repositions the stream.
+	 * 
+	 * <p>Note that this method performs a {@link #flush()} before changing the underlying stream position.
+	 */
+	
+	public void position( final long newPosition ) throws IOException {
+		flush();
+		if ( repositionableStream != null ) repositionableStream.position( newPosition  );
+		else if ( fileChannel != null ) fileChannel.position( newPosition );
+		else throw new UnsupportedOperationException( "position() can only be called if the underlying byte stream implements the RepositionableStream interface or if the getChannel() method of the underlying byte stream exists and returns a FileChannel" );
+	}
+
+	/** Returns the length of the underlying output stream, if it is {@linkplain MeasurableStream measurable}.
+	 *
+	 * <p>Note that this method performs a {@link #flush()} before detecting the length.
+	 * 
+	 * @return the length of the underlying output stream.
+	 * @throws UnsupportedOperationException if the underlying output stream is not {@linkplain MeasurableStream measurable} and
+	 * cannot provide a {@link FileChannel}.
+	 */
+
+	public long length() throws IOException {
+		flush();
+		if ( measurableStream != null ) return measurableStream.length();
+		if ( fileChannel != null ) return fileChannel.size();
+		throw new UnsupportedOperationException();
+	}
+}
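
A minimal sketch of the repositioning behaviour described above, assuming a hypothetical file out.bin; FileOutputStream exposes getChannel(), so position() and length() are supported through the cached FileChannel.

    import it.unimi.dsi.fastutil.io.FastBufferedOutputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;

    public class FastBufferedOutputStreamExample {
        public static void main( final String[] args ) throws IOException {
            final FastBufferedOutputStream fbos =
                new FastBufferedOutputStream( new FileOutputStream( "out.bin" ) );
            fbos.write( new byte[] { 1, 2, 3, 4 } );
            fbos.position( 0 );                  // flushes, then repositions the underlying channel
            fbos.write( 9 );                     // overwrites the first byte on the next flush
            System.out.println( fbos.length() ); // 4: length() flushes before measuring
            fbos.close();
        }
    }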
diff --git a/src/it/unimi/dsi/fastutil/io/FastByteArrayInputStream.java b/src/it/unimi/dsi/fastutil/io/FastByteArrayInputStream.java
new file mode 100644
index 0000000..1e406d3
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/io/FastByteArrayInputStream.java
@@ -0,0 +1,131 @@
+package it.unimi.dsi.fastutil.io;
+
+/*		 
+ * fastutil: Fast & compact type-specific collections for Java
+ *
+ * Copyright (C) 2003-2011 Sebastiano Vigna 
+ *
+ *  This library is free software; you can redistribute it and/or modify it
+ *  under the terms of the GNU Lesser General Public License as published by the Free
+ *  Software Foundation; either version 2.1 of the License, or (at your option)
+ *  any later version.
+ *
+ *  This library is distributed in the hope that it will be useful, but
+ *  WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ *  or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
+ *  for more details.
+ *
+ *  You should have received a copy of the GNU Lesser General Public License
+ *  along with this program; if not, write to the Free Software
+ *  Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+ *
+ */
+
+/** Simple, fast and repositionable byte-array input stream.
+ *
+ * <p><strong>Warning</strong>: this class implements the correct semantics
+ * of {@link #read(byte[], int, int)} as described in {@link java.io.InputStream}.
+ * The implementation given in {@link java.io.ByteArrayInputStream} is broken,
+ * but it will never be fixed because it's too late.
+ *
+ * @author Sebastiano Vigna
+ */
+
+public class FastByteArrayInputStream extends MeasurableInputStream implements RepositionableStream {
+
+	/** The array backing the input stream. */
+	public byte[] array;
+
+	/** The first valid entry. */
+	public int offset;
+
+	/** The number of valid bytes in {@link #array} starting from {@link #offset}. */
+	public int length;
+
+	/** The current position as a distance from {@link #offset}. */
+	private int position;
+
+	/** The current mark as a position, or -1 if no mark exists. */
+	private int mark;
+
+	/** Creates a new array input stream using a given array fragment.
+	 *
+	 * @param array the backing array.
+	 * @param offset the first valid entry of the array.
+	 * @param length the number of valid bytes.
+	 */
+	public FastByteArrayInputStream( final byte[] array, final int offset, final int length ) {
+		this.array = array;
+		this.offset = offset;
+		this.length = length;
+	}
+
+	/** Creates a new array input stream using a given array. 
+	 *
+	 * @param array the backing array.
+	 */
+	public FastByteArrayInputStream( final byte[] array ) {
+		this( array, 0, array.length );
+	}
+
+	public boolean markSupported() {
+		return true;
+	}
+
+	public void reset() {
+		position = mark;
+	}
+
+	/** Closing a fast byte array input stream has no effect. */
+	public void close() {}
+
+	public void mark( final int dummy ) {
+		mark = position;
+	}
+
+	public int available() {
+		return length - position;
+	}
+
+	public long skip( long n ) {
+		if ( n <= length - position ) {
+			position += (int)n;
+			return n;
+		}
+		n = length - position;
+		position = length;
+		return n;
+	}
+
+	public int read() {
+		if ( length == position ) return -1;
+		return array[ offset + position++ ] & 0xFF;
+	}
+
+	/** Reads bytes from this byte-array input stream as 
+	 * specified in {@link java.io.InputStream#read(byte[], int, int)}.
+	 * Note that the implementation given in {@link java.io.ByteArrayInputStream#read(byte[], int, int)}
+	 * will return -1 on a zero-length read at EOF, contrary to the specification. This implementation does not.
+	 */
+	
+	public int read( final byte b[], final int offset, final int length ) {
+		if ( this.length == this.position ) return length == 0 ? 0 : -1;
+		final int n = Math.min( length, this.length - this.position );
+		System.arraycopy( array, this.offset + this.position, b, offset, n );
+		this.position += n;
+		return n;
+	}
+
+	public long position() {
+		return position;
+	}
+
+	public void position( final long newPosition ) {
+		position = (int)Math.min( newPosition, length );
+	}
+
+	@Override
+	public long length() {
+		return length;
+	}
+}
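
A minimal sketch of the read/reposition semantics documented above: a zero-length read at end of stream returns 0 (as mandated by InputStream), and position() can move freely within the backing array.

    import it.unimi.dsi.fastutil.io.FastByteArrayInputStream;

    public class FastByteArrayInputStreamExample {
        public static void main( final String[] args ) {
            final FastByteArrayInputStream fbais =
                new FastByteArrayInputStream( new byte[] { 10, 20, 30 } );
            fbais.position( 3 );                                      // end of stream
            System.out.println( fbais.read( new byte[ 0 ], 0, 0 ) );  // 0, as specified
            System.out.println( fbais.read() );                       // -1
            fbais.position( 1 );
            System.out.println( fbais.read() );                       // 20
        }
    }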
diff --git a/src/it/unimi/dsi/fastutil/io/FastByteArrayOutputStream.java b/src/it/unimi/dsi/fastutil/io/FastByteArrayOutputStream.java
new file mode 100644
index 0000000..d1e37f8
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/io/FastByteArrayOutputStream.java
@@ -0,0 +1,113 @@
+package it.unimi.dsi.fastutil.io;
+
+/*		 
+ * fastutil: Fast & compact type-specific collections for Java
+ *
+ * Copyright (C) 2003-2011 Sebastiano Vigna 
+ *
+ *  This library is free software; you can redistribute it and/or modify it
+ *  under the terms of the GNU Lesser General Public License as published by the Free
+ *  Software Foundation; either version 2.1 of the License, or (at your option)
+ *  any later version.
+ *
+ *  This library is distributed in the hope that it will be useful, but
+ *  WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ *  or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
+ *  for more details.
+ *
+ *  You should have received a copy of the GNU Lesser General Public License
+ *  along with this program; if not, write to the Free Software
+ *  Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+ *
+ */
+
+import it.unimi.dsi.fastutil.bytes.ByteArrays;
+
+import java.io.IOException;
+
+/** Simple, fast byte-array output stream that exposes the backing array.
+ *
+ * <P>{@link java.io.ByteArrayOutputStream} is nice, but to get its content you
+ * must generate a new object each time. This doesn't happen here.
+ *
+ * <P>This class will automatically enlarge the backing array, doubling its
+ * size whenever new space is needed. The {@link #reset()} method will
+ * mark the content as empty, but will not decrease the capacity: use 
+ * {@link #trim()} for that purpose.
+ *
+ * @author Sebastiano Vigna
+ */
+
+public class FastByteArrayOutputStream extends MeasurableOutputStream implements RepositionableStream {
+
+	/** The default initial capacity of the backing array (16 bytes). */
+	public final static int DEFAULT_INITIAL_CAPACITY = 16;
+
+	/** The array backing the output stream. */
+	public byte[] array;
+
+	/** The number of valid bytes in {@link #array}. */
+	public int length;
+
+	/** The current writing position. */
+	private int position;
+
+	/** Creates a new array output stream with an initial capacity of {@link #DEFAULT_INITIAL_CAPACITY} bytes. */
+	public FastByteArrayOutputStream() {
+		this( DEFAULT_INITIAL_CAPACITY );
+	}
+
+	/** Creates a new array output stream with a given initial capacity.
+	 *
+	 * @param initialCapacity the initial length of the backing array.
+	 */
+	public FastByteArrayOutputStream( final int initialCapacity ) {
+		array = new byte[ initialCapacity ];
+	}
+
+	/** Creates a new array output stream wrapping a given byte array.
+	 *
+	 * @param a the byte array to wrap.
+	 */
+	public FastByteArrayOutputStream( final byte[] a ) {
+		array = a;
+	}
+
+	/** Marks this array output stream as empty. */
+	public void reset() {
+		length = 0;
+		position = 0;
+	}
+
+	/** Ensures that the length of the backing array is equal to {@link #length}. */
+	public void trim() {
+		array = ByteArrays.trim( array, length );
+	}
+
+	public void write( final int b ) {
+		if ( position >= array.length ) array = ByteArrays.grow( array, position + 1, length );
+		array[ position++ ] = (byte)b;
+		if ( length < position ) length = position;
+	}
+
+	public void write( final byte[] b, final int off, final int len ) throws IOException {
+		ByteArrays.ensureOffsetLength( b, off, len );
+		if ( position + len > array.length ) array = ByteArrays.grow( array, position + len, position );
+		System.arraycopy( b, off, array, position, len );
+		if ( position + len > length ) length = position += len;
+	}
+
+	public void position( long newPosition ) {
+		if ( newPosition > Integer.MAX_VALUE ) throw new IllegalArgumentException( "Position too large: " + newPosition );
+		position = (int)newPosition;
+	}
+
+	public long position() {
+		return position;
+	}
+
+	@Override
+	public long length() throws IOException {
+		return length;
+	}
+}
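
A minimal sketch showing the main point of the class above: the written content is inspected in place through the public array and length fields, with no copying; reset() empties the stream without shrinking, while trim() shrinks the backing array.

    import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream;
    import java.io.IOException;

    public class FastByteArrayOutputStreamExample {
        public static void main( final String[] args ) throws IOException {
            final FastByteArrayOutputStream fbaos = new FastByteArrayOutputStream();
            fbaos.write( new byte[] { 1, 2, 3, 4, 5 } );
            fbaos.trim();                                                  // backing array now holds exactly length bytes
            System.out.println( fbaos.length + " " + fbaos.array.length ); // 5 5
            fbaos.reset();                                                 // empty, but capacity is kept
            System.out.println( fbaos.length );                            // 0
            fbaos.close();
        }
    }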
diff --git a/src/it/unimi/dsi/fastutil/io/FastMultiByteArrayInputStream.java b/src/it/unimi/dsi/fastutil/io/FastMultiByteArrayInputStream.java
new file mode 100644
index 0000000..669b791
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/io/FastMultiByteArrayInputStream.java
@@ -0,0 +1,184 @@
+package it.unimi.dsi.fastutil.io;
+
+/*		 
+ * fastutil: Fast & compact type-specific collections for Java
+ *
+ * Copyright (C) 2003-2011 Paolo Boldi and Sebastiano Vigna 
+ *
+ *  This library is free software; you can redistribute it and/or modify it
+ *  under the terms of the GNU Lesser General Public License as published by the Free
+ *  Software Foundation; either version 2.1 of the License, or (at your option)
+ *  any later version.
+ *
+ *  This library is distributed in the hope that it will be useful, but
+ *  WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ *  or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
+ *  for more details.
+ *
+ *  You should have received a copy of the GNU Lesser General Public License
+ *  along with this program; if not, write to the Free Software
+ *  Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+ *
+ */
+
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.InputStream;
+
+
+/** Simple, fast and repositionable byte array input stream that multiplexes its content among several arrays.
+ * 
+ * This class is significantly slower than {@link FastByteArrayInputStream},
+ * but it can hold 256 PiB of data. The relevant constructor is {@link #FastMultiByteArrayInputStream(InputStream, long)},
+ * which fetches a stream and loads it into a sequence of byte arrays.
+ *
+ * @author Sebastiano Vigna
+ * @author Paolo Boldi
+ */
+
+public class FastMultiByteArrayInputStream extends MeasurableInputStream implements RepositionableStream {
+
+	/** The number of bits of an array slice index. */
+	public final static int SLICE_BITS = 30;
+
+	/** The maximum length of an array slice. */
+	public final static int SLICE_SIZE = 1 << SLICE_BITS;
+	
+	/** The mask to retrieve a slice offset. */
+	public final static int SLICE_MASK = SLICE_SIZE - 1;
+
+	/** The array of arrays backing the input stream. */
+	public byte[][] array;
+
+	/** The number of valid bytes in {@link #array}. */
+	public long length;
+
+	/** The current position. */
+	private long position;
+
+	/** The current mark, or -1 if no mark exists. */
+	private long mark;
+
+	/** Creates a new multi-array input stream loading it from a measurable input stream.
+	 *
+	 * @param is the input stream that will fill the array.
+	 */
+	
+	public FastMultiByteArrayInputStream( final MeasurableInputStream is ) throws IOException {
+		this( is, is.length() );
+	}
+
+	/** Creates a new multi-array input stream loading it from an input stream.
+	 *
+	 * @param is the input stream that will fill the array.
+	 * @param size the number of bytes to be read from <code>is</code>.
+	 */
+	
+	public FastMultiByteArrayInputStream( final InputStream is, long size ) throws IOException {
+		length = size;
+		array = new byte[ (int)( ( size + SLICE_SIZE - 1 ) / SLICE_SIZE ) ][];
+
+		for( int i = 0; i < array.length; i++ ) {
+			array[ i ] = new byte[ size >= SLICE_SIZE ? SLICE_SIZE : (int)size ];
+			if ( is.read( array[ i ] ) != array[ i ].length ) throw new EOFException();
+			size -= array[ i ].length;
+		}
+	}
+
+	/** Creates a new multi-array input stream sharing the backing arrays of another multi-array input stream. 
+	 *
+	 * @param is the multi-array input stream to replicate.
+	 */
+	public FastMultiByteArrayInputStream( final FastMultiByteArrayInputStream is ) {
+		this.array = is.array;
+		this.length = is.length;
+	}
+
+
+	/** Creates a new multi-array input stream using a given array. 
+	 *
+	 * @param array the backing array.
+	 */
+	public FastMultiByteArrayInputStream( final byte[] array ) {
+		this.array = new byte[ 1 ][];
+		this.array[ 0 ] = array;
+		this.length = array.length;
+	}
+
+
+
+	public boolean markSupported() {
+		return true;
+	}
+
+	public void reset() {
+		position = mark;
+	}
+
+	/** Closing a fast byte array input stream has no effect. */
+	public void close() {}
+
+	public void mark( final int dummy ) {
+		mark = position;
+	}
+
+	/** Returns the number of bytes that can be read (or skipped over) from this input stream without blocking. 
+	 *
+	 * <P>Note that this number may be smaller than the number of bytes actually
+	 * available from the stream if this number exceeds {@link Integer#MAX_VALUE}.
+	 *
+	 * @return the minimum among the number of available bytes and {@link Integer#MAX_VALUE}.
+	 */
+
+	public int available() {
+		if ( length - position > Integer.MAX_VALUE ) return Integer.MAX_VALUE;
+		return (int)( length - position );
+	}
+
+	public long skip( long n ) {
+		if ( n <= length - position ) {
+			position += n;
+
+			return n;
+		}
+		n = length - position;
+		position = length;
+		return n;
+	}
+
+	public int read() {
+		if ( length == position ) return -1;
+		return array[ (int)( position >>> SLICE_BITS ) ][ (int)( position++ & SLICE_MASK ) ] & 0xFF;
+	}
+
+	public int read( final byte[] b, int offset, final int length ) {
+		if ( this.length == this.position ) return length == 0 ? 0 : -1;
+		int res, n = (int)Math.min( length, this.length - this.position ), m = n;
+
+		do {
+			res = Math.min( n, array[ (int)( position >>> SLICE_BITS ) ].length - (int)( position & SLICE_MASK ) );
+			System.arraycopy( array[ (int)( position >>> SLICE_BITS ) ], (int)( position & SLICE_MASK ),
+				b, offset, res );
+				
+			n -= res;
+			offset += res;
+			position += res;
+			
+		} while( n > 0 );
+
+		return m;
+	}
+
+	public long position() {
+		return position;
+	}
+
+	public void position( final long newPosition ) {
+		position = Math.min( newPosition, length );
+	}
+
+	@Override
+	public long length() throws IOException {
+		return length;
+	}
+}
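
A minimal sketch of the class above: the constructor loads a stream into a sequence of slices, and a 64-bit position is decomposed internally into a slice index (position >>> SLICE_BITS) and an offset (position & SLICE_MASK); the decomposition of 5000000000L below is just an illustration of that arithmetic.

    import it.unimi.dsi.fastutil.io.FastByteArrayInputStream;
    import it.unimi.dsi.fastutil.io.FastMultiByteArrayInputStream;
    import java.io.IOException;

    public class FastMultiByteArrayInputStreamExample {
        public static void main( final String[] args ) throws IOException {
            final byte[] data = { 1, 2, 3, 4, 5, 6, 7, 8 };
            final FastMultiByteArrayInputStream in =
                new FastMultiByteArrayInputStream( new FastByteArrayInputStream( data ) );
            in.position( 6 );                     // 64-bit repositioning
            System.out.println( in.read() );      // 7
            // Slice/offset decomposition used internally for a 64-bit position.
            final long position = 5000000000L;
            final int slice = (int)( position >>> FastMultiByteArrayInputStream.SLICE_BITS );
            final int offset = (int)( position & FastMultiByteArrayInputStream.SLICE_MASK );
            System.out.println( slice + " " + offset ); // 4 705032704
        }
    }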
diff --git a/src/it/unimi/dsi/fastutil/io/InspectableFileCachedInputStream.java b/src/it/unimi/dsi/fastutil/io/InspectableFileCachedInputStream.java
new file mode 100644
index 0000000..512940c
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/io/InspectableFileCachedInputStream.java
@@ -0,0 +1,290 @@
+package it.unimi.dsi.fastutil.io;
+
+/*		 
+ * fastutil: Fast & compact type-specific collections for Java
+ *
+ * Copyright (C) 2013 Sebastiano Vigna 
+ *
+ *  This library is free software; you can redistribute it and/or
+ *  modify it under the terms of the GNU Lesser General Public
+ *  License as published by the Free Software Foundation; either
+ *  version 2.1 of the License, or (at your option) any later version.
+ *
+ *  This library is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ *  Lesser General Public License for more details.
+ *
+ *  You should have received a copy of the GNU Lesser General Public
+ *  License along with this library; if not, write to the Free Software
+ *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+ *
+ */
+
+import it.unimi.dsi.fastutil.bytes.ByteArrays;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.RandomAccessFile;
+import java.nio.ByteBuffer;
+import java.nio.channels.Channels;
+import java.nio.channels.FileChannel;
+import java.nio.channels.WritableByteChannel;
+
+/** A {@linkplain RepositionableStream repositionable} {@link MeasurableInputStream} based on
+ * cached data received by a {@link WritableByteChannel} whose first bytes can be inspected directly.
+ * 
+ * <p>An instance of this class acts as a buffer holding the bytes written through its
+ * {@link WritableByteChannel} interface (which can be easily turned into an {@link OutputStream} using
+ * {@link Channels#newOutputStream(WritableByteChannel)}). The data can be discarded at any time using
+ * {@link #clear()}. The first {@link #inspectable} bytes of {@link #buffer} contain the first
+ * bytes written. When {@link #buffer} is full, the bytes are written to an <em>overflow
+ * file</em>.
+ * 
+ * <p>At any time, the stream of bytes written since creation (or since the last {@link #clear()})
+ * are available as a fully implemented {@link MeasurableInputStream} which also implements
+ * {@link RepositionableStream} and {@linkplain #mark(int) supports marking}. 
+ * Note that you must carefully arbitrate write and read accesses,
+ * as it is always possible to call {@link #write(ByteBuffer)}
+ * and thus modify the {@linkplain #length() length} of the {@link MeasurableInputStream}.
+ * 
+ * <p>The method {@link #close()} makes the {@link MeasurableInputStream} and {@link WritableByteChannel} state-changing methods temporarily throw an {@link IOException}, but
+ * does not otherwise modify the state of the stream (i.e., the stream can be {@linkplain #clear() cleared} again). The method {@link #dispose()} can be used to release
+ * the resources associated with the stream.
+ * 
+ * <h2>Buffering</h2>
+ * 
+ * <p>This class provides no form of buffering except for the memory buffer described above, both
+ * when reading and when writing. Users should consider wrapping instances of this class with a
+ * {@link FastBufferedInputStream}, as reads after the buffer has been exhausted will be performed
+ * directly on a {@link RandomAccessFile}. */
+public class InspectableFileCachedInputStream extends MeasurableInputStream implements RepositionableStream, WritableByteChannel {
+	public static final boolean DEBUG = false;
+	
+	/** The default buffer size (64KiB). */
+	public static final int DEFAULT_BUFFER_SIZE = 64 * 1024;
+	
+	/** The inspection buffer. The first {@link #inspectable} bytes contain the first part of the input stream.
+	 * The buffer is available for inspection, but users should not modify its content. */
+	public final byte[] buffer;
+	
+	/** The number of valid bytes currently in {@link #buffer}. */
+	public int inspectable;
+	
+	/** The overflow file used by this stream: it is created at construction time (unless provided by the caller), and deleted by {@link #dispose()}. */
+	private final File overflowFile;
+	
+	/** The random access file used to access the overflow file. */
+	private final RandomAccessFile randomAccessFile;
+
+	/** {@link #randomAccessFile randomAccessFile#getChannel()}, cached. */
+	private final FileChannel fileChannel;
+
+	/** The position on this stream (i.e., the index of the next byte to be returned). */
+	private long position;
+
+	/** The {@linkplain #mark(int) mark}, if set, or -1. */
+	private long mark;
+
+	/** The write position of the {@link #randomAccessFile overflow file}. When {@link #inspectable} is equal
+	 * to {@link #buffer buffer.length}, the length of the stream is {@link #inspectable} + {@link #writePosition}.  */
+	private long writePosition;
+
+
+	/** Creates a new instance with specified buffer size and overflow file.
+	 * 
+	 * @param bufferSize the buffer size, in bytes.
+	 * @param overflowFile the file to be used to store overflow data, or <code>null</code> for a temporary file (deleted on exit).
+	 */
+	public InspectableFileCachedInputStream( final int bufferSize, final File overflowFile ) throws IOException {
+		if ( bufferSize <= 0 ) throw new IllegalArgumentException( "Illegal buffer size " + bufferSize );
+		if ( overflowFile != null ) this.overflowFile = overflowFile;
+		else ( this.overflowFile = File.createTempFile( getClass().getSimpleName(), "overflow" ) ).deleteOnExit();
+		buffer = new byte[ bufferSize ];
+		randomAccessFile = new RandomAccessFile( this.overflowFile, "rw" );
+		fileChannel = randomAccessFile.getChannel();
+		mark = -1;
+	}
+	
+	/** Creates a new instance with specified buffer size and a temporary overflow file.
+	 * 
+	 * @param bufferSize the buffer size, in bytes.
+	 */
+	public InspectableFileCachedInputStream( final int bufferSize ) throws IOException {
+		this( bufferSize, null );
+	}
+	
+	/** Creates a new instance with default buffer size and a temporary overflow file. */
+	public InspectableFileCachedInputStream() throws IOException {
+		this( DEFAULT_BUFFER_SIZE );
+	}
+
+	private void ensureOpen() throws IOException {
+		if ( position == -1 ) throw new IOException( "This " + getClass().getSimpleName() + " is closed" );
+	}
+
+	/** Clears the content of this {@link InspectableFileCachedInputStream}, zeroing the length of the represented
+	 * stream. */
+	public void clear() throws IOException {
+		if ( ! fileChannel.isOpen() ) throw new IOException( "This " + getClass().getSimpleName() + " is closed" );
+		writePosition = position = inspectable = 0;
+		mark = -1;
+	}
+	
+	/** Appends the content of a specified buffer to the end of the currently represented stream.
+	 * 
+	 * @param byteBuffer a byte buffer.
+	 * @return the number of bytes appended (i.e., {@link ByteBuffer#remaining() byteBuffer.remaining()}). 
+	 */
+	public int write( final ByteBuffer byteBuffer ) throws IOException {
+		ensureOpen();
+		final int remaining = byteBuffer.remaining();
+
+		if ( inspectable < buffer.length ) {
+			// Still some space in the inspectable buffer.
+			final int toBuffer = Math.min( buffer.length - inspectable, remaining );
+			byteBuffer.get( buffer, inspectable, toBuffer );
+			inspectable += toBuffer;
+		}
+
+		if ( byteBuffer.hasRemaining() ) {
+			fileChannel.position( writePosition );
+			writePosition += fileChannel.write( byteBuffer );
+		}
+		
+		return remaining;
+	}
+		
+	/** Truncates the overflow file to a given size if possible.
+	 * 
+	 * @param size the new size; the final size is the maximum between the current write position (i.e., the length
+	 * of the represented stream minus the number of bytes currently in the inspection buffer) and this value.
+	 */
+	public void truncate( final long size ) throws FileNotFoundException, IOException {
+		fileChannel.truncate( Math.max( size, writePosition ) );
+	}
+	
+	/** Makes the stream unreadable until the next {@link #clear()}. */
+	@Override
+	public void close() throws IOException {
+		position = -1;
+	}
+
+	/** Disposes this stream, deleting the overflow file. After that, the stream is unusable. */
+	public void dispose() throws IOException {
+		position = -1;
+		randomAccessFile.close();
+		overflowFile.delete();
+	}
+
+	protected void finalize() throws Throwable {
+		try {
+			dispose();
+		}
+		finally {
+			super.finalize();
+		}
+	}
+
+	@Override
+	public int available() throws IOException {
+		ensureOpen();
+		return (int)Math.min( Integer.MAX_VALUE, length() - position );
+	}
+	
+	@Override
+	public int read( byte[] b, int offset, int length ) throws IOException {
+		ensureOpen();
+		if ( length == 0 ) return 0;
+		if ( position == length() ) return -1; // Nothing to read.
+		ByteArrays.ensureOffsetLength( b, offset, length );
+		int read = 0;
+		
+		if ( position < inspectable ) {
+			/* The first min(inspectable - position, length) bytes should be taken from the buffer. */
+			final int toCopy = Math.min( inspectable - (int)position, length );
+			System.arraycopy( buffer, (int)position, b, offset, toCopy );
+			length -= toCopy;
+			offset += toCopy;
+			position += toCopy;
+			read = toCopy;
+		}
+
+		if ( length > 0 ) { // More to read.
+			fileChannel.position( position - inspectable );
+			final int toRead = (int)Math.min( length() - position, length );
+			randomAccessFile.read( b, offset, toRead );
+			position += toRead;
+			read += toRead;
+		}
+		
+		return read;
+	}
+	
+	@Override
+	public int read( byte[] b ) throws IOException {
+		return read( b, 0, b.length );
+	}
+
+	@Override
+	public long skip( final long n ) throws IOException {
+		ensureOpen();
+		final long toSkip = Math.min( n, length() - position );
+		position += toSkip;
+		return toSkip;
+	}
+	
+	@Override
+	public int read() throws IOException {
+		ensureOpen();
+		if ( position == length() ) return -1; // Nothing to read
+		if ( position < inspectable ) return buffer[ (int)position++ ] & 0xFF;
+		fileChannel.position( position - inspectable );
+		position++;
+		return randomAccessFile.read();
+	}
+
+	@Override
+	public long length() throws IOException {
+		ensureOpen();
+		return inspectable + writePosition; 
+	}
+
+	@Override
+	public long position() throws IOException {
+		ensureOpen();
+		return position;
+	}
+
+	/** Positions the input stream.
+	 * 
+	 * @param position the new position (will be minimized with {@link #length()}).
+	 */
+	public void position( final long position ) throws IOException {
+		this.position = Math.min( position, length() );
+	}
+
+	@Override
+	public boolean isOpen() {
+		return position != -1;
+	}
+	
+	@Override
+	public void mark( final int readlimit ) {
+		mark = position;
+	}
+
+	@Override
+	public void reset() throws IOException {
+		ensureOpen();
+		if ( mark == -1 ) throw new IOException( "Mark has not been set" );
+		position( mark );
+	}
+
+	@Override
+	public boolean markSupported() {
+		return true;
+	}
+}
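
A minimal sketch of the write/inspect/read cycle described in the class comment above, with a deliberately tiny buffer (4 bytes) so that part of the data spills to the overflow file.

    import it.unimi.dsi.fastutil.io.InspectableFileCachedInputStream;
    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.channels.Channels;

    public class InspectableFileCachedInputStreamExample {
        public static void main( final String[] args ) throws IOException {
            final InspectableFileCachedInputStream cache = new InspectableFileCachedInputStream( 4 );
            final OutputStream out = Channels.newOutputStream( cache );
            out.write( new byte[] { 1, 2, 3, 4, 5, 6 } );  // 4 bytes buffered, 2 spill to the overflow file
            System.out.println( cache.inspectable + " " + cache.buffer[ 0 ] ); // 4 1
            System.out.println( cache.length() );          // 6
            cache.position( 5 );
            System.out.println( cache.read() );            // 6 (read back from the overflow file)
            cache.dispose();                               // deletes the overflow file
        }
    }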
diff --git a/src/it/unimi/dsi/fastutil/io/MeasurableInputStream.java b/src/it/unimi/dsi/fastutil/io/MeasurableInputStream.java
new file mode 100644
index 0000000..6010cda
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/io/MeasurableInputStream.java
@@ -0,0 +1,33 @@
+package it.unimi.dsi.fastutil.io;
+
+/*		 
+ * fastutil: Fast & compact type-specific collections for Java
+ *
+ * Copyright (C) 2006-2011 Sebastiano Vigna 
+ *
+ *  This library is free software; you can redistribute it and/or
+ *  modify it under the terms of the GNU Lesser General Public
+ *  License as published by the Free Software Foundation; either
+ *  version 2.1 of the License, or (at your option) any later version.
+ *
+ *  This library is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ *  Lesser General Public License for more details.
+ *
+ *  You should have received a copy of the GNU Lesser General Public
+ *  License along with this library; if not, write to the Free Software
+ *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+ *
+ */
+
+
+import java.io.InputStream;
+
+/** An {@link InputStream} that implements also the {@link MeasurableStream} interface.
+ *
+ * @since 5.0.4
+ */
+
+public abstract class MeasurableInputStream extends InputStream implements MeasurableStream {
+}
diff --git a/src/it/unimi/dsi/fastutil/io/MeasurableOutputStream.java b/src/it/unimi/dsi/fastutil/io/MeasurableOutputStream.java
new file mode 100644
index 0000000..80f7ca1
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/io/MeasurableOutputStream.java
@@ -0,0 +1,33 @@
+package it.unimi.dsi.fastutil.io;
+
+/*		 
+ * fastutil: Fast & compact type-specific collections for Java
+ *
+ * Copyright (C) 2010-2011 Sebastiano Vigna 
+ *
+ *  This library is free software; you can redistribute it and/or
+ *  modify it under the terms of the GNU Lesser General Public
+ *  License as published by the Free Software Foundation; either
+ *  version 2.1 of the License, or (at your option) any later version.
+ *
+ *  This library is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ *  Lesser General Public License for more details.
+ *
+ *  You should have received a copy of the GNU Lesser General Public
+ *  License along with this library; if not, write to the Free Software
+ *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+ *
+ */
+
+
+import java.io.OutputStream;
+
+/** An {@link OutputStream} that implements also the {@link MeasurableStream} interface.
+ *
+ * @since 6.0.0
+ */
+
+public abstract class MeasurableOutputStream extends OutputStream implements MeasurableStream {
+}
diff --git a/src/it/unimi/dsi/fastutil/io/MeasurableStream.java b/src/it/unimi/dsi/fastutil/io/MeasurableStream.java
new file mode 100644
index 0000000..a4d2e59
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/io/MeasurableStream.java
@@ -0,0 +1,56 @@
+package it.unimi.dsi.fastutil.io;
+
+/*		 
+ * fastutil: Fast & compact type-specific collections for Java
+ *
+ * Copyright (C) 2010-2011 Sebastiano Vigna 
+ *
+ *  This library is free software; you can redistribute it and/or
+ *  modify it under the terms of the GNU Lesser General Public
+ *  License as published by the Free Software Foundation; either
+ *  version 2.1 of the License, or (at your option) any later version.
+ *
+ *  This library is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ *  Lesser General Public License for more details.
+ *
+ *  You should have received a copy of the GNU Lesser General Public
+ *  License along with this library; if not, write to the Free Software
+ *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+ *
+ */
+
+
+import java.io.IOException;
+
+/** A stream that provides eager access to its length,
+ * and keeps track of the current position (e.g., the number of bytes read so far, or the current
+ * position of the file pointer). 
+ *
+ * <P>This class has two methods, both specified as optional. This apparently bizarre
+ * behaviour is necessary because of wrapper classes which use reflection 
+ * to support those methods (see, e.g., {@link MeasurableInputStream}, {@link FastBufferedInputStream} and {@link FastBufferedOutputStream}).
+ * 
+ * @since 6.0.0
+ */
+
+public interface MeasurableStream  {
+	
+	/** Returns the overall length of this stream (optional operation). In most cases, this will require the
+	 *  stream to perform some extra action, possibly changing the state of the stream itself (typically, reading
+	 *  all the bytes up to the end, or flushing an output stream).
+	 *  Implementing classes should always document what state the stream will be in
+	 *  after calling this method, and which kind of exception could be thrown.
+	 */ 
+	public long length() throws IOException;
+
+	/** Returns the current position in this stream (optional operation).
+	 * 
+	 * <p>Usually, the position is just the number of bytes read or written
+	 * since the stream was opened, but in the case of a
+	 * {@link it.unimi.dsi.fastutil.io.RepositionableStream} it
+	 * represents the current position.
+	 */ 
+	public long position() throws IOException;
+}
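
A minimal sketch (the class name is hypothetical) of an implementation of the two optional operations above: for a pure output sink that only counts bytes, both length() and position() can simply report the number of bytes written so far.

    import it.unimi.dsi.fastutil.io.MeasurableOutputStream;
    import java.io.IOException;

    public class CountingMeasurableOutputStream extends MeasurableOutputStream {
        private long written;

        @Override
        public void write( final int b ) throws IOException { written++; }

        @Override
        public long length() throws IOException { return written; }

        @Override
        public long position() throws IOException { return written; }
    }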
diff --git a/src/it/unimi/dsi/fastutil/io/RepositionableStream.java b/src/it/unimi/dsi/fastutil/io/RepositionableStream.java
new file mode 100644
index 0000000..349c9eb
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/io/RepositionableStream.java
@@ -0,0 +1,45 @@
+package it.unimi.dsi.fastutil.io;
+
+/*		 
+ * fastutil: Fast & compact type-specific collections for Java
+ *
+ * Copyright (C) 2005-2011 Sebastiano Vigna 
+ *
+ *  This library is free software; you can redistribute it and/or
+ *  modify it under the terms of the GNU Lesser General Public
+ *  License as published by the Free Software Foundation; either
+ *  version 2.1 of the License, or (at your option) any later version.
+ *
+ *  This library is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ *  Lesser General Public License for more details.
+ *
+ *  You should have received a copy of the GNU Lesser General Public
+ *  License along with this library; if not, write to the Free Software
+ *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+ *
+ */
+
+
+/** A basic interface specifying positioning methods for a byte stream.
+ *
+ * @author Sebastiano Vigna
+ * @since 4.4
+ */
+
+public interface RepositionableStream {
+
+	/** Sets the current stream position.
+	 *
+	 * @param newPosition the new stream position.
+	 */
+	void position( long newPosition ) throws java.io.IOException;
+
+	/** Returns the current stream position.
+	 *
+	 * @return the current stream position.
+	 */
+	long position() throws java.io.IOException;
+
+}
diff --git a/src/it/unimi/dsi/fastutil/io/package.html b/src/it/unimi/dsi/fastutil/io/package.html
new file mode 100644
index 0000000..e68ee53
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/io/package.html
@@ -0,0 +1,19 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+  <head>
+    <title>fastutil</title>
+  </head>
+
+  <body>
+
+	 <P>Provides classes and static methods that make object and primitive-type I/O easier and faster.
+
+
+	<h2>Package Specification</h2>
+      <P>Classes in this package provide very efficient, unsynchronised buffered
+	input and output streams (with support for repositioning, too) and fast streams
+	based on byte arrays. Static containers
+	instead provide a wealth of methods that make it very easy
+	to serialize and deserialize objects and arrays.
+  </body>
+</html>
diff --git a/src/it/unimi/dsi/fastutil/longs/package.html b/src/it/unimi/dsi/fastutil/longs/package.html
new file mode 100644
index 0000000..772ffd4
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/longs/package.html
@@ -0,0 +1,12 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+  <head>
+    <title>fastutil</title>
+  </head>
+
+  <body>
+
+	 <P>Provides type-specific classes for long elements or keys.
+
+  </body>
+</html>
diff --git a/src/it/unimi/dsi/fastutil/objects/package.html b/src/it/unimi/dsi/fastutil/objects/package.html
new file mode 100644
index 0000000..04fbac2
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/objects/package.html
@@ -0,0 +1,20 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+  <head>
+    <title>fastutil</title>
+  </head>
+
+  <body>
+
+	 <P>Provides type-specific classes for object elements or keys.
+
+         <P>Whenever possible, <code>fastutil</code> provides both typical
+         collections, which compare objects using <code>equals()</code>, and
+         <em>reference-based</em> collections, which use reference equality
+         (<code>==</code>). See the related comments in the overview.
+
+         <P>Of course, reference-based sorted sets and maps make no
+         sense, and are not generated.
+
+  </body>
+</html>
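
A minimal sketch of the difference described above, using ObjectOpenHashSet (equals()-based) and ReferenceOpenHashSet (==-based) from this package: two equal but distinct strings end up as one element in the former and two in the latter.

    import it.unimi.dsi.fastutil.objects.ObjectOpenHashSet;
    import it.unimi.dsi.fastutil.objects.ReferenceOpenHashSet;

    public class ReferenceVsObjectExample {
        public static void main( final String[] args ) {
            final String a = new String( "key" );
            final String b = new String( "key" );  // equal to a, but a distinct object
            final ObjectOpenHashSet<String> byEquals = new ObjectOpenHashSet<String>();
            byEquals.add( a );
            byEquals.add( b );
            final ReferenceOpenHashSet<String> byIdentity = new ReferenceOpenHashSet<String>();
            byIdentity.add( a );
            byIdentity.add( b );
            System.out.println( byEquals.size() + " " + byIdentity.size() ); // 1 2
        }
    }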
diff --git a/src/it/unimi/dsi/fastutil/shorts/package.html b/src/it/unimi/dsi/fastutil/shorts/package.html
new file mode 100644
index 0000000..094f8d0
--- /dev/null
+++ b/src/it/unimi/dsi/fastutil/shorts/package.html
@@ -0,0 +1,12 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+  <head>
+    <title>fastutil</title>
+  </head>
+
+  <body>
+
+	 <P>Provides type-specific classes for short elements or keys.
+
+  </body>
+</html>
diff --git a/src/overview.html b/src/overview.html
new file mode 100644
index 0000000..df4d965
--- /dev/null
+++ b/src/overview.html
@@ -0,0 +1,878 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+  <head>
+    <title>fastutil</title>
+  </head>
+
+  <body>
+
+    <P>Extends the <a href="http://java.sun.com/j2se/1.5/docs/guide/collections/">Java™ Collections Framework</a>
+    by providing type-specific maps, sets, lists and priority queues with a small memory
+    footprint and fast access and insertion; provides also big (64-bit) arrays, sets and lists, and
+    fast, practical I/O classes for binary and text files. It is 
+    free software
+    distributed under the <A HREF="http://www.apache.org/licenses/LICENSE-2.0.html">Apache License 2.0</A>.
+
+	<p><strong>Warning:</strong> <code>fastutil 6.1.0</code> has been significantly reorganised.
+	A number of not-so-useful classes (double- and sequi-indirect priority queues) are no longer
+	distributed (but you can still generate their sources). The old implementation of
+	hash tables (both sets and maps) has been replaced by a linear-probing implementation that is
+	about twice faster and has true deletions, but does not let you set a growth factor (again,
+	you can still generate their sources).
+
+    <h2>Package Specification</h2>
+
+	<p><code>fastutil</code> is formed by three cores:
+	<ul>
+	<li>type-specific classes that extend naturally
+	the <a href="http://java.sun.com/j2se/1.5/docs/guide/collections/">Java™ Collections Framework</a>;
+	<li>classes that support very large collections;
+	<li>classes for fast and practical access to binary and text files.
+	</ul>
+	<p>The three cores are briefly introduced in the next sections, and then discussed at length in the rest of this overview.
+
+	<h3>Type-specific classes</h3>
+	
+    <p><code>fastutil</code> specializes the most useful {@link
+      java.util.HashSet}, {@link java.util.HashMap}, {@link
+      java.util.LinkedHashSet}, {@link java.util.LinkedHashMap}, {@link
+      java.util.TreeSet}, {@link java.util.TreeMap}, {@link
+      java.util.IdentityHashMap}, {@link java.util.ArrayList} and {@link
+      java.util.Stack} classes to versions that accept a specific kind of
+      key or value (e.g., {@linkplain it.unimi.dsi.fastutil.ints.IntSet integers}). Besides, there are also
+      several types of {@linkplain it.unimi.dsi.fastutil.PriorityQueue priority
+      queues} and a large collection of static objects and
+      methods (such as {@linkplain it.unimi.dsi.fastutil.objects.ObjectSets#EMPTY_SET
+      immutable empty containers}, {@linkplain
+      it.unimi.dsi.fastutil.ints.IntComparators#OPPOSITE_COMPARATOR
+      comparators implementing the opposite of the natural order},
+      {@linkplain it.unimi.dsi.fastutil.ints.IntIterators#wrap(int[])
+      iterators obtained by wrapping an array}, and so on).</p>
+
+	<p>To understand what's going on at a glance, the best thing is to look at
+    the <A HREF="#example">examples</A> provided. If you already used the
+    Collections Framework, everything should look
+    rather natural. If, in particular, you use an IDE such as <A
+    HREF="http://www.eclipse.org/">Eclipse</A>, which can suggest you the
+    method names, all you need to know is <A HREF="#names">the right name for
+    the class you need</A>.
+    	
+	<h3>Support for very large collections</h3>
+	
+	<p>With <code>fastutil</code> 6, a new set of classes makes it possible
+	to handle very large collections: in particular, collections whose size exceeds
+	2<sup>31</sup>. {@linkplain it.unimi.dsi.fastutil.BigArrays Big arrays}
+	are arrays-of-arrays handled by a wealth of static methods that act on them
+	as if they were monodimensional arrays with 64-bit indices;
+	{@linkplain it.unimi.dsi.fastutil.BigList big lists} provide 64-bit list access; 
+	{@linkplain it.unimi.dsi.fastutil.ints.IntOpenHashBigSet big hash sets} provide support for sets whose
+	size is only limited by the amount of core memory.
+	
+	<p>The usual methods from {@link java.util.Arrays} and similar classes have
+	been extended to big arrays: have a look at the Javadoc documentation of
+	{@link it.unimi.dsi.fastutil.BigArrays} and {@link it.unimi.dsi.fastutil.ints.IntBigArrays}
+	to get an idea of the generic and type-specific methods available.
+	
+	<h3>Fast and practical I/O</h3>
+	
+	<code>fastutil</code> provides replacements for some standard classes of {@link java.io}
+	that are plagued by a number of problems (see, e.g., {@link it.unimi.dsi.fastutil.io.FastBufferedInputStream}).
+	The {@link it.unimi.dsi.fastutil.io.BinIO} and {@link it.unimi.dsi.fastutil.io.TextIO} static
+	containers contain dozens of methods that make it possible to load and save quickly
+	(big) arrays to disk, to adapt binary and text files to iterators, and so on.
+
+    <h2>More on type-specific classes</h2>
+
+    <p>All data structures in <code>fastutil</code> implement their standard
+    counterpart interface whenever possible (e.g., {@link java.util.Map} for maps). Thus, they
+    can be just plugged into existing code, using the standard access methods
+    (of course, any attempt to use the wrong type for keys or values will
+    produce a {@link java.lang.ClassCastException}). However, they also provide
+    (whenever possible) many polymorphic versions of the most used methods that
+    avoid boxing/unboxing. In doing so, they implement more stringent interfaces that
+    extend and strengthen the standard ones (e.g., {@link
+    it.unimi.dsi.fastutil.ints.Int2IntSortedMap} or {@link
+    it.unimi.dsi.fastutil.ints.IntListIterator}).</p>
+    
+    <p><strong>Warning</strong>: automatic boxing and unboxing can lead you
+    to choose the wrong method when using <code>fastutil</code>. It is also extremely inefficient. 
+    We suggest configuring your programming environment to mark boxing/unboxing as
+    a warning, or even better, as an error.
+          
+    <p>Of course, the main point of type-specific data structures is that the
+    absence of wrappers around primitive types can increase speed and reduce
+    space occupancy by several times. The presence  of generics in Java
+    does not change this fact, since there is no genericity for primitive
+    types.
+    
+    <p>The implementation techniques used in <code>fastutil</code> are quite
+    different from those of {@link java.util}: for instance, open-addressing
+    hash tables, threaded AVL trees, threaded red-black trees and exclusive-or
+    lists. An effort has also been made to provide powerful derived objects and
+    to expose them by covariantly overriding return types:
+    for instance, the {@linkplain it.unimi.dsi.fastutil.objects.Object2IntSortedMap#keySet() keys of sorted maps
+    are sorted} and iterators on sorted containers are always {@linkplain
+    it.unimi.dsi.fastutil.BidirectionalIterator bidirectional}.
+
+    <p>More generally, the rationale behind <code>fastutil</code> is that
+    <em>you should never need to explicitly code natural
+    transformations</em>. You do not need to define an anonymous class to
+    iterate over an array of integers—just {@linkplain
+    it.unimi.dsi.fastutil.ints.IntIterators#wrap(int[]) wrap it}. You do not
+    need to write a loop to put the characters returned by an iterator into a
+    set—just {@linkplain
+    it.unimi.dsi.fastutil.chars.CharOpenHashSet#CharOpenHashSet(CharIterator)
+    use the right constructor}. And so on.
+
+      <h3><A NAME="names"></A>The Names</h3>
+
+    <p>In general, class names adhere to the general pattern</p>
+
+    <div style="padding: 1em">
+      <var>valuetype</var> <var>collectiontype</var>
+    </div>
+    
+    <p>for collections, and</p>
+    
+    <div style="padding: 1em">
+      <var>keytype</var> 2 <var>valuetype</var> <var>maptype</var>
+    </div>
+    
+    <p>for maps.
+
+    <P>By "type" here I mean a capitalized primitive type, {@link
+      java.lang.Object} or <code>Reference</code>. In the latter case, we
+      are treating objects, but their equality is established by reference
+      equality (that is, without invoking <code>equals()</code>), similarly
+      to {@link java.util.IdentityHashMap}. Of course, reference-based
+      classes are significantly faster.</p>
+    
+    <P>Thus, an {@link it.unimi.dsi.fastutil.ints.IntOpenHashSet} stores
+    integers efficiently and implements {@link
+    it.unimi.dsi.fastutil.ints.IntSet}, whereas a {@link
+    it.unimi.dsi.fastutil.longs.Long2IntAVLTreeMap} does the same for maps from
+    longs to integers (but the map will be sorted, tree based, and balanced
+    using the AVL criterion), implementing {@link
+    it.unimi.dsi.fastutil.longs.Long2IntMap}. If you need additional
+    flexibility in choosing your {@linkplain
+    it.unimi.dsi.fastutil.Hash.Strategy hash strategy}, you can put, say, arrays
+    of integers in a {@link it.unimi.dsi.fastutil.objects.ObjectOpenCustomHashSet},
+    maybe using the ready-made {@linkplain
+    it.unimi.dsi.fastutil.ints.IntArrays#HASH_STRATEGY hash strategy for
+    arrays}.  A {@link it.unimi.dsi.fastutil.longs.LongLinkedOpenHashSet}
+    stores longs in a hash table, but provides a predictable iteration order
+    (the insertion order) and access to first/last elements of the order. A
+    {@link it.unimi.dsi.fastutil.objects.Reference2ReferenceOpenHashMap} is
+    similar to an {@link java.util.IdentityHashMap}. You can manage a priority
+    queue of characters in a heap using a {@link
+    it.unimi.dsi.fastutil.chars.CharHeapPriorityQueue}, which implements {@link
+    it.unimi.dsi.fastutil.chars.CharPriorityQueue}.  {@linkplain
+    it.unimi.dsi.fastutil.bytes.ByteArrayFrontCodedList Front-coded lists} are
+    highly specialized immutable data structures that store compactly a large
+    number of arrays: if you don't know them you probably don't need them.
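
A minimal sketch of the naming pattern and of the type-specific methods mentioned above, using two classes from this release (IntOpenHashSet and Long2IntAVLTreeMap); the numbers are arbitrary.

    import it.unimi.dsi.fastutil.ints.IntIterator;
    import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
    import it.unimi.dsi.fastutil.longs.Long2IntAVLTreeMap;

    public class NamingExample {
        public static void main( final String[] args ) {
            // valuetype + collectiontype: a hash set of primitive ints.
            final IntOpenHashSet set = new IntOpenHashSet( new int[] { 3, 1, 2, 3 } );
            // keytype + 2 + valuetype + maptype: a sorted (AVL tree) map from longs to ints.
            final Long2IntAVLTreeMap map = new Long2IntAVLTreeMap();
            map.put( 42L, 7 );                    // type-specific put( long, int ): no boxing
            int sum = 0;
            for( final IntIterator i = set.iterator(); i.hasNext(); ) sum += i.nextInt();
            System.out.println( sum + " " + map.get( 42L ) ); // 6 7
        }
    }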
+
+	 <p>For a number of data structures that were not available in the 
+	  <a href="http://java.sun.com/j2se/1.5/docs/guide/collections/">Java™ Collections Framework</a>
+	  when <code>fastutil</code> was created, an object-based version is
+	  contained in {@link it.unimi.dsi.fastutil}, and in that case the prefix
+	  <code>Object</code> is not used (see, e.g., {@link it.unimi.dsi.fastutil.PriorityQueue}).
+	  
+    <p>Since there are eight primitive types in Java, and we support
+    reference-based containers, we get 1877 (!) classes (some nonsensical
+    classes, such as <code>Boolean2BooleanAVLTreeMap</code>, are not
+    generated). Many classes are generated just to mimic the hierarchy of
+    {@link java.util} so to redistribute common code in a similar way. There
+    are also several abstract classes that ease significantly the creation of
+    new type-specific classes by providing automatically generic methods based
+    on the type-specific ones.</p>
+
+    <p>The huge number of classes required a suitable division in subpackages
+    (more than anything else, to avoid crashing browsers with a preposterous
+    package summary). Each subpackage is characterized by the type of elements
+    or keys: thus, for instance, {@link it.unimi.dsi.fastutil.ints.IntSet}
+    belongs to {@link it.unimi.dsi.fastutil.ints} (the plural is required, as
+    <code>int</code> is a keyword and cannot be used in a package name), as
+    well as {@link it.unimi.dsi.fastutil.ints.Int2ReferenceRBTreeMap}. Note
+    that all classes for non-primitive elements and keys are gathered in {@link
+    it.unimi.dsi.fastutil.objects}. Finally, a number of non-type-specific
+      classes have been gathered in {@link it.unimi.dsi.fastutil}.
+
+      
+      <h3>An In-Depth Look</h3>
+
+    <P>The following table summarizes the available interfaces and
+    implementations. To get more information, you can look at a specific
+    implementation in {@link
+    it.unimi.dsi.fastutil} or, for instance, {@link it.unimi.dsi.fastutil.ints}.
+
+    <div align=center>
+    <table border=1 summary="Interfaces and Abstract Implementations" title="Interfaces and Abstract Implementations">
+	<tr><th>Interfaces<th>Abstract Implementations<th>Implementations
+	<tr><td>Iterable
+	<tr><td>Collection<td>AbstractCollection
+	<tr><td>Set<td>AbstractSet<td>OpenHashSet, OpenCustomHashSet, ArraySet, OpenHashBigSet
+	<tr><td>SortedSet<td>AbstractSortedSet<td>RBTreeSet, AVLTreeSet, LinkedOpenHashSet
+	<tr><td>Function<td>AbstractFunction<td>
+	<tr><td>Map<td>AbstractMap<td>OpenHashMap, OpenCustomHashMap, ArrayMap
+	<tr><td>SortedMap<td>AbstractSortedMap<td>RBTreeMap, AVLTreeMap, LinkedOpenHashMap
+	<tr><td>List, BigList†<td>AbstractList, AbstractBigList<td>ArrayList, BigArrayBigList, ArrayFrontCodedList
+	<tr><td>PriorityQueue†<td>AbstractPriorityQueue†<td>HeapPriorityQueue, ArrayPriorityQueue, ArrayFIFOQueue
+	<tr><td>IndirectPriorityQueue†<td>AbstractIndirectPriorityQueue†<td>HeapSemiIndirectPriorityQueue, HeapIndirectPriorityQueue, ArrayIndirectPriorityQueue
+	<tr><td>Stack†<td>AbstractStack†<td>ArrayList
+	<tr><td>Iterator, BigListIterator†<td>AbstractIterator, AbstractListIterator, AbstractBigListIterator
+	<tr><td>Comparator<td>AbstractComparator
+	<tr><td>BidirectionalIterator†<td>AbstractBidirectionalIterator
+	<tr><td>ListIterator<td>AbstractListIterator
+	<tr><td>Size64‡	
+    </table>
+    </div>
+
+    <P>†: this class has also a non-type-specific implementation in {@link it.unimi.dsi.fastutil}.
+    <P>‡: this class has <em>only</em> a non-type-specific implementation in {@link it.unimi.dsi.fastutil}.
+    
+    <P>Note that abstract implementations are named by prefixing the interface
+    name with <samp>Abstract</samp>. Thus, if you want to define a
+    type-specific structure holding a set of integers without the hassle of
+    defining object-based methods, you should inherit from {@link it.unimi.dsi.fastutil.ints.AbstractIntSet}.
+
+    <P>The following table summarizes static containers, which usually give rise
+    both to a type-specific and to a generic class:
+      
+    <div align=center>
+    <table border=1 style="border: solid thin black" title="Static Containers" summary="Static Containers">
+	<tr><th>Static Containers
+	<tr><td>Collections
+	<tr><td>Sets
+	<tr><td>SortedSets
+	<tr><td>Functions
+	<tr><td>Maps†
+	<tr><td>SortedMaps
+	<tr><td>Lists
+	<tr><td>BigLists
+	<tr><td>Arrays†
+	<tr><td>BigArrays†
+	<tr><td>Heaps
+	<tr><td>SemiIndirectHeaps
+	<tr><td>IndirectHeaps
+	<tr><td>PriorityQueues†
+	<tr><td>IndirectPriorityQueues†
+	<tr><td>Iterators
+	<tr><td>BigListIterators
+	<tr><td>Comparators
+	<tr><td>Hash‡
+	<tr><td>HashCommon‡
+    </table>
+    </div>
+
+    <P>†: this class also has a non-type-specific implementation in {@link it.unimi.dsi.fastutil}.
+    <P>‡: this class has <em>only</em> a non-type-specific implementation in {@link it.unimi.dsi.fastutil}.
+
+    <P>The static containers also provide special-purpose implementations for
+    all kinds of {@linkplain it.unimi.dsi.fastutil.objects.ObjectSets#EMPTY_SET empty
+    structures} (including {@linkplain
+    it.unimi.dsi.fastutil.objects.ObjectArrays#EMPTY_ARRAY arrays}) and
+    {@linkplain it.unimi.dsi.fastutil.ints.Int2IntMaps#singleton(int,int) singletons}.
+ 
+    <h3>Warnings</h3>
+
+    <p><strong>No class is synchronized</strong>. If multiple threads access one
+      of these classes concurrently, and at least one of the threads modifies it,
+      it must be synchronized externally. Iterators will behave unpredictably in
+      the presence of concurrent modifications. Reads, however, can be carried
+      out concurrently.
+
+    <p><strong>Reference-based classes violate the {@link java.util.Map}
+	contract</strong>. They intentionally compare objects by reference, and do
+      not use the <code>equals()</code> method. They should be used only
+      when reference-based equality is desired (for instance, if all
+      objects involved are canonicalized, as happens with interned strings).
+
+    <p><strong>Linked classes do not fully implement the {@link
+	java.util.SortedMap} interface</strong>. They provide methods to get the
+      first and last element in iteration order, and to start a bidirectional iterator from any element,
+      but any submap or subset
+      method will cause an {@link java.lang.UnsupportedOperationException}
+      (this may change in future versions).
+
+    <p><strong>Substructures of sorted classes allow the creation of
+	substructures with arbitrary bounds</strong>. In {@link java.util}, instead, a
+      substructure of a substructure must be contained in its originator (BTW, why?). For instance,
+      <code>(new TreeSet()).tailSet(1).tailSet(0)</code> will throw an exception, but {@link
+      it.unimi.dsi.fastutil.ints.IntRBTreeSet (new
+      IntRBTreeSet()).tailSet(1).tailSet(0)} won't.
+
+    <p><strong>Immutability is syntactically based (as opposed to
+	semantically based)</strong>. All methods that are known at compile time
+      not to cause modifications to the structure will not throw
+      exceptions (e.g., {@link it.unimi.dsi.fastutil.objects.ObjectSets#EMPTY_SET
+      EMPTY_SET.clear()}). All other methods will cause an {@link
+      java.lang.UnsupportedOperationException}.  Note that (as of Java 5)
+      the situation in {@link java.util} is definitely different, and
+      inconsistent: for instance, in singletons <code>add()</code> always
+      throws an exception, whereas <code>remove()</code> does so only if the
+      singleton would be modified. This behaviour agrees with the interface documentation, 
+      but it is nonetheless confusing.
+
+    <h3>Additional Features and Methods</h3>
+
+    <p>The new interfaces add some very natural methods and strengthen many of
+    the old ones. Moreover, whenever possible, the object returned is type-specific, 
+    or implements a more powerful interface. Before <code>fastutil</code> 5, the 
+    impossibility of covariantly overriding return types made these features
+    accessible only by means of type casting, but fortunately this is no longer true. 
+
+    <P>More in detail:
+    <UL>
+      
+      <LI>Keys and values of a map are of the <code>fastutil</code> type you
+	would expect (e.g., the keys of an {@link
+	it.unimi.dsi.fastutil.ints.Int2LongSortedMap} are an {@link
+	it.unimi.dsi.fastutil.ints.IntSortedSet} and the values are a {@link
+	it.unimi.dsi.fastutil.longs.LongCollection}).
+
+      <LI>Hash-based maps that return primitive numeric values have an <code>add()</code>
+	method (see, e.g., {@link it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap#add(int,int)})
+	that <em>adds</em> an increment to the current value of a key; it is
+	most useful to avoid the inefficient procedure of getting a value,
+	incrementing it and then putting it back into the map (typically, when
+	counting the number of occurrences of elements in a sequence); see the sketch after this list.
+
+	<li>Hash-set implementations have an additional {@link it.unimi.dsi.fastutil.objects.ObjectOpenHashSet#get(Object) get()}
+	method that returns the actual object in the collection that is equal to the query key.
+
+      <LI>Linked hash-based maps and sets have a wealth of additional methods that make it easy
+      to use them as caches. See, for instance, {@link it.unimi.dsi.fastutil.ints.Int2IntLinkedOpenHashMap#putAndMoveToLast(int,int)},
+      {@link it.unimi.dsi.fastutil.ints.IntLinkedOpenHashSet#addAndMoveToLast(int)}
+      or {@link it.unimi.dsi.fastutil.ints.Int2IntLinkedOpenHashMap#removeFirstInt()}.
+      
+      <LI>Submaps of a sorted map and subsets of a sorted set are of the
+	<code>fastutil</code> type you would expect, too. 
+
+      <LI>Iterators returned by <code>iterator()</code> are type-specific.
+
+      <LI>Sorted structures in <code>fastutil</code> return
+	type-specific {@linkplain
+	it.unimi.dsi.fastutil.BidirectionalIterator bidirectional
+	iterators}. This means that you can move back and forth among
+	entries, keys or values.
+
+	<li>Some classes for maps (check the specification) return a <em>fast entry set</em>
+	(see, e.g., {@link it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap#int2IntEntrySet()});
+	fast entry sets can, in turn, provide a <em>{@linkplain it.unimi.dsi.fastutil.ints.Int2IntMap.FastEntrySet#fastIterator()}</em>
+	that is guaranteed not to create a large number of objects, <em>possibly by always returning the same entry</em> (suitably mutated).
+
+      <LI>The type-specific sorted set interfaces, moreover, feature an optional
+	method <code>iterator(from)</code> which creates a type-specific {@link
+	it.unimi.dsi.fastutil.BidirectionalIterator} starting from a given
+	element of the domain (not necessarily in the set). See, for instance,
+	{@link it.unimi.dsi.fastutil.ints.IntSortedSet#iterator(int)}. The method is
+	implemented by all type-specific sorted sets and subsets.
+
+      <LI>Finally, there are constructors that make it easy to build sets
+	from arrays and iterators. This means, for instance, that you can quickly create a
+	set of strings with a statement like
+	<blockquote>
+	  <code>new ObjectOpenHashSet( new String[] { "foo", "bar" } )</code>
+	</blockquote>
+	or just "unroll" the integers returned by an iterator into a list with
+	<blockquote>
+	  <code>new IntArrayList( iterator )</code>
+	</blockquote>
+
+    </UL>
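+
+    <P>For instance, the <code>add()</code> method mentioned above makes counting occurrences a
+    one-line affair (a minimal sketch; the sample data is ours, and we rely on the default return
+    value being zero):
+    <PRE>
+Int2IntOpenHashMap count = new Int2IntOpenHashMap(); // the default return value is 0
+int[] data = { 1, 2, 1, 3, 1 };
+for( int x : data ) count.add( x, 1 ); // adds 1 to the current count of x
+count.get( 1 ); // This method call will return 3
+    </PRE>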
+
+
+    <P>There are a few quirks, however, that you should be aware of:
+
+    <ul>
+
+      <li>The versions of the {@link java.util.Map#get(Object)
+	get()}, {@link java.util.Map#put(Object,Object) put()} and
+	{@link java.util.Map#remove(Object) remove()} methods that
+	return a primitive type cannot, of course, rely on returning
+	<code>null</code> to denote the absence of a certain
+	pair. Rather, they return a <em>{@linkplain
+	  it.unimi.dsi.fastutil.ints.Int2LongMap#defaultReturnValue(long) default 
+	  return value}</em>, which is set to 0 cast to the
+	return type (<code>false</code> for booleans) at creation, but
+	can be changed using the <code>defaultReturnValue()</code>
+	method (see, e.g., {@link
+	it.unimi.dsi.fastutil.ints.Int2IntMap}). Note that changing the
+	default return value does not change anything about the data
+	structure; it is just a way to return a reasonably meaningful
+	result—it can be changed at any time. For uniformity reasons,
+	even maps returning objects can use
+	<code>defaultReturnValue()</code> (of course, in this case the
+	default return value is initialized to <code>null</code>). A
+	submap or subset has an independent default return value (which
+	however is initialized to the default return value of the
+	originator).</li>
+
+      <li>For all maps that have objects as keys, the {@link
+	java.util.Map#get(Object) get()} and {@link
+	java.util.Map#remove(Object) remove()} methods do not admit
+	polymorphic versions, as Java does not allow return-value
+	polymorphism. Rather, the extended interfaces introduce new
+	methods of the form {@link
+	it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap#getInt(Object)
+	get<var>valuetype</var>()} and {@link
+	it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap#removeInt(Object)
+	remove<var>valuetype</var>()}. Similar problems occur with
+	{@link it.unimi.dsi.fastutil.chars.CharSortedSet#firstChar()
+	first()}, {@link
+	it.unimi.dsi.fastutil.chars.CharSortedSet#lastChar() last()},
+	and so on.</li>
+
+      <LI>Similarly, all iterators have a suitable method {@link
+	it.unimi.dsi.fastutil.ints.IntIterator#nextInt()
+	next<var>type</var>()} that directly returns a primitive type.
+	And, of course, you have a type-specific version of {@link
+	java.util.ListIterator#previous() previous()}.
+
+      <li>For the same reason, the method {@link java.util.Collection#toArray}
+	has a polymorphic version accepting a type-specific array, but there are
+	also explicitly typed methods
+	{@link it.unimi.dsi.fastutil.bytes.ByteCollection#toByteArray() to<var>keytype</var>Array()}.</li>
+
+	<li>The standard entry-set iterators for hash-based maps use an entry object
+	that refers to the data contained in the hash table. If you retrieve an
+	entry and then delete it, the entry object will become invalid and will throw
+	an {@link java.lang.ArrayIndexOutOfBoundsException}. This does not
+	apply to fast iterators (see above).
+
+
+      <li>A name clash between the list and collection interfaces
+	forces the deletion method of a collection to be named {@link
+	it.unimi.dsi.fastutil.doubles.DoubleCollection#rem(double)
+	rem()}. At the risk of creating some confusion, {@link
+	it.unimi.dsi.fastutil.doubles.DoubleSet#remove(double) remove()}
+	reappears in the type-specific set interfaces, so the only
+	really unpleasant effect is that you must use
+	<code>rem()</code> on variables that are collections, but not
+	sets—for instance, {@linkplain
+	it.unimi.dsi.fastutil.ints.IntList type-specific lists}.
+
+      <li>There are type-specific versions of {@link java.util.Comparator} that
+	require specifying both a type-specific comparison method and an object-based
+	one; this is necessary as a type-specific comparator must implement {@link
+	java.util.Comparator}. However, to simplify the creation of type-specific
+	comparators there are abstract type-specific comparator classes that
+	implement an object-based comparator wrapping the (abstract)
+	type-specific one; thus, if you need to create a type-specific comparator
+	you just have to inherit from those classes and define the type-specific
+	method (see the sketch after this list). The same holds for iterators.
+
+      <li>Stacks are <em>interfaces</em> implemented by array-based
+	lists: the interface, moreover, is slightly different from the
+	implementation contained in {@link java.util.Stack}.
+    </ul>
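+
+    <P>For instance, the following sketch creates a descending-order type-specific comparator by
+    inheriting from the abstract class, and uses it to sort an array (we assume the
+    comparator-based <code>quickSort()</code> variant of <code>IntArrays</code>; the array is
+    just sample data):
+    <PRE>
+int[] a = { 30, 10, 50, 20, 40 };
+IntComparator descending = new AbstractIntComparator() {
+	public int compare( int k1, int k2 ) { // only the type-specific method must be defined
+		return k1 == k2 ? 0 : ( k1 > k2 ? -1 : 1 );
+	}
+};
+IntArrays.quickSort( a, descending ); // a is now { 50, 40, 30, 20, 10 }
+    </PRE>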
+
+    <h3>Functions</h3>
+
+    {@link it.unimi.dsi.fastutil.Function} (and its type-specific versions) is a new
+    interface geared towards mathematical functions (e.g., hashes) which associates
+    values to keys, but in which enumerating keys or values is not possible. It is essentially
+    a {@link java.util.Map} that does not provide access to set representations.
+
+    <p><code>fastutil</code>
+    provides interfaces, abstract implementations and the usual array of wrappers
+    in the suitable static container (e.g., {@link it.unimi.dsi.fastutil.ints.Int2IntFunctions}).
+    Implementations will be provided by other projects (e.g., <a href="http://sux.dsi.unimi.it/">Sux4J</a>).
+
+    <p>All <code>fastutil</code> type-specific maps extend their respective type-specific
+    functions: but, alas, we cannot have {@link java.util.Map} extending {@link it.unimi.dsi.fastutil.Function}.
+
+    <h3>Static Container Classes</h3>
+
+    <P><code>fastutil</code> provides a number of static methods and
+      singletons, much like {@link java.util.Collections}. To avoid creating
+      classes with hundreds of methods, there are separate containers for
+      sets, lists, maps and so on. Generic containers are placed in {@link
+      it.unimi.dsi.fastutil}, whereas type-specific containers are in the
+      appropriate package.  You should look at the documentation of the
+      static classes contained in {@link it.unimi.dsi.fastutil}, and in
+      type-specific static classes such as {@link
+      it.unimi.dsi.fastutil.chars.CharSets}, {@link
+      it.unimi.dsi.fastutil.floats.Float2ByteSortedMaps}, {@link
+      it.unimi.dsi.fastutil.longs.LongArrays}, {@link
+      it.unimi.dsi.fastutil.floats.FloatHeaps}.  Presently, you can easily
+      obtain {@linkplain it.unimi.dsi.fastutil.objects.ObjectSets#EMPTY_SET empty collections},
+      {@linkplain it.unimi.dsi.fastutil.longs.Long2IntMaps#EMPTY_MAP empty
+      type-specific collections}, {@linkplain
+      it.unimi.dsi.fastutil.ints.IntLists#singleton(int) singletons}, 
+      {@linkplain
+      it.unimi.dsi.fastutil.objects.Object2ReferenceSortedMaps#synchronize(Object2ReferenceSortedMap)
+      synchronized versions} of any type-specific container and
+      unmodifiable versions of       {@linkplain
+      it.unimi.dsi.fastutil.objects.ObjectLists#unmodifiable(ObjectList)
+      containers} and   {@linkplain
+      it.unimi.dsi.fastutil.ints.IntIterators#unmodifiable(IntBidirectionalIterator) iterators} (of course,
+      unmodifiable containers always return unmodifiable iterators).
+      
+    <P>On a completely different side, the {@linkplain
+      it.unimi.dsi.fastutil.ints.IntArrays type-specific static container
+      classes for arrays} provide several useful methods that let you treat
+      an array much like an array-based list, completely hiding the growth
+      logic. In many cases, using these methods and a plain array is even simpler
+      than using a full-blown {@linkplain
+      it.unimi.dsi.fastutil.doubles.DoubleArrayList type-specific
+      array-based list}, because element access is syntactically much
+      simpler. The version for objects uses reflection to return arrays of
+      the same type as the argument.
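+
+    <P>For instance, appending elements to a plain array might look as follows (a sketch; we
+    assume the <code>grow()</code> and <code>trim()</code> methods of <code>IntArrays</code>,
+    and the variable names are ours):
+    <PRE>
+int[] a = new int[ 16 ];
+int size = 0;
+// append an element, letting IntArrays take care of the growth logic
+a = IntArrays.grow( a, size + 1 );
+a[ size++ ] = 42;
+// once done, trim the backing array to the actual number of elements
+a = IntArrays.trim( a, size );
+    </PRE>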
+
+    <P>For the same reason, <code>fastutil</code> provides a full
+    implementation of methods that manipulate arrays as type-specific
+    {@linkplain it.unimi.dsi.fastutil.ints.IntHeaps heaps}, {@linkplain
+    it.unimi.dsi.fastutil.ints.IntSemiIndirectHeaps semi-indirect heaps} and
+    {@linkplain it.unimi.dsi.fastutil.ints.IntIndirectHeaps indirect heaps}. There are
+    also quicksort and mergesort implementations that use arbitrary type-specific comparators.
+    
+    <p><code>fastutil</code> also offers a less common choice: a very tuned
+    implementation of {@linkplain it.unimi.dsi.fastutil.ints.IntArrays#radixSort(int[],int,int) radix sort} for
+    all primitive types. It is significantly faster than quicksort even at moderate sizes (say, more than 10000 elements), and should
+    be considered the sorting algorithm of choice if you do not need a generic comparator.
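+
+    <p>For instance (a minimal usage sketch, using the signature linked above):
+    <PRE>
+int[] a = { 30, 10, 50, 20, 40 };
+IntArrays.radixSort( a, 0, a.length ); // sorts the whole array in ascending order
+    </PRE>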
+    
+    <p>Several variants are provided. First of all, you can radix sort
+    {@linkplain it.unimi.dsi.fastutil.ints.IntArrays#radixSort(int[],int[], int, int) two} or
+	{@linkplain it.unimi.dsi.fastutil.ints.IntArrays#radixSort(int[][],int,int) even more} arrays in parallel. You
+	can also perform {@linkplain it.unimi.dsi.fastutil.ints.IntArrays#radixSortIndirect(int[],int[],int,int,boolean) indirect} sorts,
+	for instance if you want to compute the sorting permutation of an array. 
+	 
+     <p>The sorting algorithm is a tuned radix sort adapted from Peter M. McIlroy, Keith Bostic and M. Douglas
+     McIlroy, “Engineering radix sort”, <i>Computing Systems</i>, 6(1), pages 5−27 (1993),
+     and further improved using the digit-oracle idea described by
+     Juha Kärkkäinen and Tommi Rantala in “Engineering radix sort for strings”,
+     <i>String Processing and Information Retrieval, 15th International Symposium</i>, volume 5280 of
+     Lecture Notes in Computer Science, pages 3−14, Springer (2008). The basic algorithm is not
+     stable, but this is immaterial for arrays of primitive types. For the indirect case, there is a parameter
+     specifying whether the algorithm should be stable.
+    
+	
+    
+    <h3>Iterators and Comparators</h3>
+
+    <P><code>fastutil</code> provides type-specific iterators and
+      comparators. The interface of a <code>fastutil</code> iterator is
+      slightly more powerful than that of a {@link java.util} iterator, as
+      it contains a {@link it.unimi.dsi.fastutil.objects.ObjectIterator#skip(int)
+      skip()} method that lets you skip a given number of elements (an
+      {@linkplain
+      it.unimi.dsi.fastutil.objects.ObjectBidirectionalIterator#back(int) analogous
+      method} is provided for bidirectional iterators). For objects (even
+      those managed by reference), the extended interface is named {@link
+      it.unimi.dsi.fastutil.objects.ObjectIterator}; it is the return type, for
+      instance, of {@link
+      it.unimi.dsi.fastutil.objects.ObjectCollection#iterator()}.
+      
+      <P><code>fastutil</code> also provides classes and methods that make it
+      easy to create type-specific iterators and comparators. There are abstract versions of
+      each (type-specific) iterator and comparator that implement in the
+      obvious way some of the methods (see, e.g., {@link
+      it.unimi.dsi.fastutil.ints.AbstractIntIterator} or {@link
+      it.unimi.dsi.fastutil.ints.AbstractIntComparator}).
+
+    <P>A plethora of useful static methods is also provided by various
+      type-specific static containers (e.g., {@link
+      it.unimi.dsi.fastutil.ints.IntIterators}) and {@link
+      it.unimi.dsi.fastutil.ints.IntComparators}: among other things, you can
+      {@linkplain it.unimi.dsi.fastutil.ints.IntIterators#wrap(int[]) wrap
+      arrays} and {@linkplain
+      it.unimi.dsi.fastutil.ints.IntIterators#asIntIterator(java.util.Iterator)
+      standard iterators} in type-specific iterators, {@linkplain
+      it.unimi.dsi.fastutil.ints.IntIterators#fromTo(int,int) generate them}
+      giving an interval of elements to be returned, {@linkplain
+      it.unimi.dsi.fastutil.objects.ObjectIterators#concat(ObjectIterator[])
+      concatenate them} or {@linkplain
+      it.unimi.dsi.fastutil.objects.ObjectIterators#pour(Iterator,ObjectCollection)
+      pour them} into a set.
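+
+    <P>For instance (a sketch; the <code>pour()</code> variant for <code>int</code>
+    iterators is assumed to be analogous to the object-based one linked above):
+    <PRE>
+IntIterator i = IntIterators.fromTo( 0, 1000 ); // returns 0, 1, ..., 999
+i.skip( 990 ); // skips the first 990 elements
+IntSet s = new IntOpenHashSet();
+IntIterators.pour( i, s ); // s now contains 990, 991, ..., 999
+    </PRE>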
+
+
+    <h3>Queues</h3>
+
+    <P><code>fastutil</code> offers two types of queues: <em>direct
+    queues</em> and <em>indirect queues</em>. A direct queue offers type-specific methods to {@linkplain
+    it.unimi.dsi.fastutil.longs.LongPriorityQueue#enqueue(long) enqueue} and
+    {@linkplain it.unimi.dsi.fastutil.longs.LongPriorityQueue#dequeueLong()
+    dequeue} elements. An indirect queue needs a <em>reference array</em>,
+    specified at construction time: {@linkplain
+    it.unimi.dsi.fastutil.IndirectPriorityQueue#enqueue(int) enqueue} and
+    {@linkplain it.unimi.dsi.fastutil.IndirectPriorityQueue#dequeue()
+    dequeue} operations refer to indices in the reference array. The advantage
+    is that it may be possible to {@linkplain
+    it.unimi.dsi.fastutil.IndirectPriorityQueue#changed(int) notify the change}
+      of any element of the reference array, or even to {@linkplain
+    it.unimi.dsi.fastutil.IndirectPriorityQueue#remove(int) remove an arbitrary
+      element}.
+      
+    <P>Queues have two implementations: a trivial array-based
+    implementation, and a heap-based implementation. In particular, heap-based
+    indirect queues may be {@linkplain
+    it.unimi.dsi.fastutil.objects.ObjectHeapIndirectPriorityQueue fully
+    indirect} or just {@linkplain
+    it.unimi.dsi.fastutil.objects.ObjectHeapSemiIndirectPriorityQueue
+    semi-indirect}: in the latter case, there is no need for an explicit
+    indirection array (which saves one integer per queue entry), but not all
+    operations will be available. Note that there are also
+    {@linkplain it.unimi.dsi.fastutil.ints.IntArrayFIFOQueue FIFO queues}.
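+
+    <P>For instance, a heap-based direct queue can be used as follows (a minimal sketch; we
+    assume the <code>int</code> counterparts of the methods linked above):
+    <PRE>
+IntPriorityQueue q = new IntHeapPriorityQueue(); // natural order
+q.enqueue( 4 );
+q.enqueue( 1 );
+q.enqueue( 3 );
+q.dequeueInt(); // This method call will return 1
+q.firstInt(); // This method call will return 3
+    </PRE>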
+
+
+    <h3>Custom Hashing</h3>
+
+    <P>Sometimes, the behaviour of the built-in equality and hashing methods is
+    not what you want. In particular, this happens if you store arrays in a hash-based
+    collection and would like to compare them by content. For this kind of application,
+      <code>fastutil</code> provides {@linkplain it.unimi.dsi.fastutil.Hash.Strategy custom hash strategies},
+      which define new equality and hashing methods to be used inside the collection. There are even
+      {@linkplain it.unimi.dsi.fastutil.ints.IntArrays#HASH_STRATEGY ready-made strategies} for arrays. Note, however,
+      that <code>fastutil</code> containers do not cache hash codes, so custom hash strategies must be efficient.
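+
+    <P>For instance, a hash set of <code>int</code> arrays compared by content can be sketched as
+    follows (raw types are used for brevity, and we assume the constructor accepting just a strategy):
+    <PRE>
+ObjectOpenCustomHashSet s = new ObjectOpenCustomHashSet( IntArrays.HASH_STRATEGY );
+s.add( new int[] { 1, 2, 3 } );
+s.contains( new int[] { 1, 2, 3 } ); // This method call will return true
+    </PRE>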
+
+
+    <h3>Abstract Classes</h3>
+
+    <p><code>fastutil</code> provides a wide range of abstract classes, to
+      help in implementing its interfaces. They take care, for instance, of
+      providing wrappers for non-type-specific method calls, so that you have to
+      write just the (usually simpler) type-specific version. 
+
+	<h2>More on the support for very large collections</h2>
+	
+	<p>With the continuous increase in core memory available, Java arrays are starting to show
+	their size limitation (indices cannot exceed 2<sup>31</sup> &minus; 1). <code>fastutil</code>
+	proposes to store <em>big arrays</em> using arrays-of-arrays subject to certain
+	size restrictions and a number of supporting static methods. Please read the documentation
+	of {@link it.unimi.dsi.fastutil.BigArrays} to understand how big arrays work.
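+	
+	<p>For instance (a minimal sketch; we assume the static <code>wrap()</code>, <code>length()</code>,
+	<code>get()</code> and <code>set()</code> methods of <code>IntBigArrays</code>):
+	<PRE>
+int[][] big = IntBigArrays.wrap( new int[] { 1, 2, 3 } ); // a (small) big array
+IntBigArrays.length( big ); // This method call will return 3 (as a long)
+IntBigArrays.set( big, 2, 42 );
+IntBigArrays.get( big, 2 ); // This method call will return 42
+	</PRE>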
+	
+	<p>Correspondingly, <code>fastutil</code> proposes a new interface, called
+	{@link it.unimi.dsi.fastutil.Size64}, that should be implemented by very large
+	collections. {@link it.unimi.dsi.fastutil.Size64} contains a method
+	{@link it.unimi.dsi.fastutil.Size64#size64()} which returns the collection
+	size as a long integer.
+	
+	<p><code>fastutil</code> provides {@linkplain it.unimi.dsi.fastutil.BigList big lists},
+	which are lists with 64-bit indices; of course, they implement {@link it.unimi.dsi.fastutil.Size64}.
+	An implementation based on big arrays is provided (see, e.g., {@link it.unimi.dsi.fastutil.ints.IntBigArrayBigList}),
+	as well as static containers (see, e.g., {@link it.unimi.dsi.fastutil.ints.IntBigLists}).
+	While it is unlikely that such collections will be kept in main memory as big arrays, there
+	are a number of situations, such as exposing large files through a list interface or
+	storing a large amount of data using <a href="http://sux4j.dsi.unimi.it/">succinct data structures</a>,
+	in which a big list interface is natural.
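+	
+	<p>For instance (a minimal sketch):
+	<PRE>
+IntBigList l = new IntBigArrayBigList();
+l.add( 42 );
+l.size64(); // This method call will return 1 (as a long)
+	</PRE>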
+	
+	<p>Unfortunately, {@linkplain java.util.List lists} and {@linkplain it.unimi.dsi.fastutil.BigList big lists},
+	as well as {@linkplain java.util.ListIterator list iterators} and {@linkplain it.unimi.dsi.fastutil.BigListIterator big-list iterators},
+	cannot be made compatible: we thus provide adapters (see, e.g., {@link it.unimi.dsi.fastutil.ints.IntBigLists#asBigList(it.unimi.dsi.fastutil.ints.IntList)}).
+
+	<p>Finally, <code>fastutil</code> provides {@linkplain it.unimi.dsi.fastutil.longs.LongOpenHashBigSet big hash sets}, which
+	are based on big arrays. They are about 30% slower than non-big sets, but their size is limited only by
+	the amount of core memory.
+
+	<h2>More on fast and practical I/O</h2>
+	
+	<P><code>fastutil</code> includes an {@linkplain
+	it.unimi.dsi.fastutil.io I/O package} that provides, for instance, {@linkplain
+	it.unimi.dsi.fastutil.io.FastBufferedInputStream fast, unsynchronized
+	buffered input streams}, {@linkplain
+	it.unimi.dsi.fastutil.io.FastBufferedOutputStream fast, unsynchronized
+	buffered output streams}, and a wealth of static methods to store and
+	retrieve data in {@linkplain it.unimi.dsi.fastutil.io.TextIO textual} and
+	{@linkplain it.unimi.dsi.fastutil.io.BinIO binary} form. The latter, in particular,
+	contain methods that load and store big arrays.
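+	
+	<P>For instance, wrapping file streams is analogous to using the standard buffered streams
+	(a sketch; the file names are ours, and the <code>java.io</code> classes are assumed to be imported):
+	<PRE>
+InputStream in = new FastBufferedInputStream( new FileInputStream( "data.bin" ) );
+OutputStream out = new FastBufferedOutputStream( new FileOutputStream( "copy.bin" ) );
+	</PRE>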
+      
+    <h2>Performance</h2>
+    
+    <p>The main reason behind <code>fastutil</code> is performance, both in
+    time and in space. The relevant methods of type-specific hash maps and sets
+    are something like 2 to 10 times faster than those of the standard
+    classes. Note that performance of hash-based classes on object keys is
+    usually <em>worse</em> (from a few percent to doubled time) than that of
+    {@link java.util}, because <code>fastutil</code> classes do not cache hash
+    codes (although this matters less if keys cache their hash codes internally,
+    as in the case of {@link java.lang.String}). Of course, you can try to get
+    more speed from hash tables by using a smaller load factor: for this purpose,
+    alternative load factors are proposed in {@link it.unimi.dsi.fastutil.Hash#FAST_LOAD_FACTOR}
+    and {@link it.unimi.dsi.fastutil.Hash#VERY_FAST_LOAD_FACTOR}.
+
+    <p>For tree-based classes you have two choices: AVL and red-black
+      trees. The essential difference is that AVL trees are more balanced (their
+      height is at most 1.44 log <var>n</var>), whereas red-black trees have
+      faster deletions (but their height is at most 2 log <var>n</var>). So on
+      small trees red-black trees could be faster, but on very large sets AVL
+      trees will shine. In general, AVL trees have slightly slower updates but
+      faster searches; however, on very large collections the smaller height may
+      lead in fact to faster updates, too.
+
+    <p><code>fastutil</code> enormously reduces the creation and collection of
+      objects. First of all, if you use the polymorphic methods and iterators no
+      wrapper objects have to be created. Moreover, since <code>fastutil</code>
+      uses open-addressing hashing techniques, creation and garbage collection of
+      hash-table entries are avoided (but tables have to be rehashed whenever
+      they are filled beyond the load factor). The major reduction of the number
+      of objects around has a definite (but very difficult to measure) impact on
+      the whole application (as garbage collection runs proportionally to the
+      number of alive objects).
+
+    <p>Maps whose iteration is very expensive in terms of object creation (e.g., hash-based classes) usually
+      return a type-specific {@link it.unimi.dsi.fastutil.ints.Int2IntMap.FastEntrySet FastEntrySet}
+      whose {@link it.unimi.dsi.fastutil.ints.Int2IntMap.FastEntrySet#fastIterator() fastIterator()}
+      method significantly reduces object creation by always returning
+      the same entry object, suitably mutated.
+
+    <p>Whenever possible, <code>fastutil</code> tries to gain some speed by
+      checking for faster interfaces: for instance, the various set-theoretic
+      methods <code>addAll()</code>, <code>retainAll()</code>, etc., check whether
+      their arguments are type-specific and use faster iterators and accessors
+      accordingly.
+      
+
+    <h3>Faster Hash Tables</h3>
+
+    <p><code>fastutil</code> 6.1.0 changes significantly the implementation
+    of hash-based classes. Instead of <em>double hashing</em>, we use
+    <em>linear probing</em>. This has some consequences:
+    <ul>
+    <li>the classes are now about two times faster;
+    <li>deletions are effective—there is no “marking” of
+    deleted entries (the claim that this was impossible with open 
+    addressing was, of course, wrong);
+    <li>given a size and a load factor, the backing array of a table will
+    be in general larger (in the worst case about two times larger);
+    <li>it is no longer possible to set the <em>growth factor</em> of the table, which
+    is fixed at 2 (the old methods to control the growth factor {@linkplain
+      it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap#growthFactor(int) are now no-ops}—they
+      are kept just for backward compatibility);
+    <li>there are efficient implementations of {@linkplain it.unimi.dsi.fastutil.ints.IntOpenHashBigSet big sets}.
+    </ul>
+    
+    <h3>Memory Usage</h3>
+
+    <P>The absence of wrappers makes data structures in <code>fastutil</code>
+    much smaller: even in the case of objects, however, data structures in
+    <code>fastutil</code> try to be space-efficient.
+
+    <h4>Hash Tables</h4>
+
+    <p>To avoid memory waste, (unlinked) hash tables in
+      <code>fastutil</code> keep no additional information about elements
+      (such as a list of keys). In particular, this means that enumerations
+      are always linear in the size of the table (rather than in the number
+      of keys). Usually, this would imply slower iterators. Nonetheless, the
+      iterator code includes a single, tight loop; moreover, it is possible
+      to avoid the creation of wrappers. These two facts make
+      <code>fastutil</code> iterators in practice <em>faster</em> than {@link
+      java.util}'s.
+
+    <p>The memory footprint for a table of length &#x2113; is exactly the
+      memory required for the related types times &#x2113;, plus an
+      overhead of &#x2113; booleans to store the state of each entry. The
+      absence of wrappers around primitive types can reduce space occupancy by
+      several times (this applies even more to serialized data, e.g., when you
+      save such a data structure in a file). These figures can vary greatly with
+      your virtual machine, JVM version, CPU, etc.
+
+    <p>More precisely, when you ask for a map that will hold <var>n</var>
+      elements with load factor 0 &lt; <var>f</var> ≤ 1,
+      2<sup>⌈log(<var>n</var>/<var>f</var>)⌉</sup>
+      entries are allocated (for instance, for <var>n</var> = 1000 and <var>f</var> = 0.75
+      this yields 2<sup>11</sup> = 2048 entries). When the table is filled beyond the load factor, it is rehashed,
+      doubling its size.
+
+    <p>In the case of linked hash tables, there is an additional vector of
+      2<sup>⌈log <var>n</var> / <var>f</var>⌉</sup> longs that is used to store link information. Each
+      element records the next and previous element (packed together so as to be more cache-friendly).
+      
+    <h4>Balanced Trees</h4>
+
+    <p>The balanced trees implementation is also very parsimonious.
+      <code>fastutil</code> is based on the excellent (and unbelievably well
+      documented) code contained in Ben Pfaff's <A
+	HREF="http://www.msu.edu/~pfaffben/avl/">GNU libavl</A>, which describes in
+      detail how to handle balanced trees with <em>threads</em>. Thus, the
+      overhead per entry is two pointers and one integer, which compares well to
+      three pointers plus one boolean of the standard tree maps. The trick is
+      that we use the integer bit by bit, so we consume two bits to store thread
+      information, plus one or two bits to handle balancing. As a result, we get
+      bidirectional iterators in constant space and amortized constant time
+      without having to store references to parent nodes.
+      
+    <P>It should be mentioned that all tree-based classes have a fixed overhead
+      for some arrays that are used as stacks to simulate recursion; in
+      particular, we need 48 booleans for AVL trees and 64 pointers plus 64
+      booleans for red-black trees.
+
+    <h2><A NAME="example"></A>An Example</h2>
+
+    <P>Suppose you want to store a sorted map from longs to integers. The first
+      step is to define a variable of the right interface, and assign it a new
+      tree map (say, of the AVL type):
+    <PRE>
+Long2IntSortedMap m = new Long2IntAVLTreeMap();
+    </PRE>
+    <P>Now we can easily modify and access its content:
+    <PRE>
+m.put( 1, 5 );
+m.put( 2, 6 );
+m.put( 3, 7 );
+m.put( 1000000000L, 10 );
+m.get( 1 ); // This method call will return 5
+m.get( 4 ); // This method call will return 0
+    </PRE>
+    <P>We can also try to change the default return value:
+    <PRE>
+m.defaultReturnValue( -1 );
+m.get( 4 ); // This method call will return -1
+    </PRE>
+    <P>We can obtain a type-specific iterator on the key set:
+    <PRE>
+LongBidirectionalIterator i = m.keySet().iterator();
+// Now we sum all keys
+long s = 0;
+while( i.hasNext() ) s += i.nextLong();
+    </PRE>
+    <P>We now generate a head map, and iterate bidirectionally over it starting
+      from a given point:
+    <PRE>
+// This map contains only keys smaller than 4
+Long2IntSortedMap m1 = m.headMap( 4 );
+// This iterator is positioned between 2 and 3
+LongBidirectionalIterator t = m1.keySet().iterator( 2 );
+t.previous(); // This method call will return 2 (t.next() would return 3)
+    </PRE>
+    <P>Should we need to access the map concurrently, we can wrap it:
+    <PRE>
+// This map can be safely accessed by many threads
+Long2IntSortedMap m2 = Long2IntSortedMaps.synchronize( m1 );
+    </PRE>
+    <P>Linked sets and maps are very flexible data structures that can be used to implement, for
+      instance, queues whose content can be probed efficiently:
+    <PRE>
+// This map remembers insertion order (note that we are using the array-based constructor)
+IntSortedSet s = new IntLinkedOpenHashSet( new int[] { 4, 3, 2, 1 } );
+s.firstInt(); // This method call will return 4
+s.lastInt(); // This method call will return 1
+s.contains(5); // This method will return false
+IntBidirectionalIterator i = s.iterator( s.lastInt() ); // We could even cast it to a list iterator 
+i.previous(); // This method call will return 1
+i.previous(); // This method call will return 2
+s.remove(s.lastInt()); // This will remove the last element in constant time
+    </PRE>
+    <P>Now, we play with iterators. It is easy to create iterators over
+      intervals or over arrays, and combine them:
+    <PRE>
+IntIterator i = IntIterators.fromTo( 0, 10 ); // This iterator will return 0, 1, ..., 9
+int[] a = new int[] { 5, 1, 9 };
+IntIterator j = IntIterators.wrap( a ); // This iterator will return 5, 1, 9.
+IntIterator k = IntIterators.concat( new IntIterator[] { i , j } ); // This iterator will return 0, 1, ..., 9, 5, 1, 9
+    </PRE>
+<p>It is easy to build sets and maps on the fly using the array-based
+      constructors:
+<pre>
+IntSet s = new IntOpenHashSet( new int[] { 1, 2, 3 } ); // This set will contain 1, 2, and 3
+Char2IntMap m = new Char2IntRBTreeMap( new char[] { '@', '-' }, new int[] { 0, 1 } ); // This map will map '@' to 0 and '-' to 1
+</pre>
+    <P>Whenever you have some data structure, it is easy to serialize it in an
+      efficient (buffered) way, or to dump their content in textual form:
+    <PRE>
+BinIO.storeObject( s, "foo" ); // This method call will save s in the file named "foo"
+TextIO.storeInts( s.intIterator(), "foo.txt" ); // This method call will save the content of s in ASCII
+i = TextIO.asIntIterator( "foo.txt" ); // This iterator will parse the file and return the integers therein
+    </PRE>
+  </body>
+</html>
diff --git a/test/it/unimi/dsi/fastutil/ArraysTest.java b/test/it/unimi/dsi/fastutil/ArraysTest.java
new file mode 100644
index 0000000..8376ab2
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ArraysTest.java
@@ -0,0 +1,87 @@
+package it.unimi.dsi.fastutil;
+
+import static org.junit.Assert.assertArrayEquals;
+import it.unimi.dsi.fastutil.ints.AbstractIntComparator;
+
+import org.junit.Test;
+
+public class ArraysTest {
+	
+	@Test
+	public void testMergeSort() {
+		int[] s = new int[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		final int[] a = s.clone();
+		
+		java.util.Arrays.sort( s );
+		int[] sorted = s.clone();
+		Arrays.mergeSort( 0, a.length, new AbstractIntComparator() {
+			@Override
+			public int compare( int k1, int k2 ) {
+				return a[ k1 ] - a[ k2 ]; 
+			}
+		}, new Swapper() {
+			@Override
+			public void swap( int k1, int k2 ) {
+				final int t = a[ k1 ];
+				a[ k1 ] = a[ k2 ];
+				a[ k2 ] = t;
+			}
+		});
+		assertArrayEquals( sorted, a );
+
+		Arrays.mergeSort( 0, a.length, new AbstractIntComparator() {
+			@Override
+			public int compare( int k1, int k2 ) {
+				return a[ k1 ] - a[ k2 ]; 
+			}
+		}, new Swapper() {
+			@Override
+			public void swap( int k1, int k2 ) {
+				final int t = a[ k1 ];
+				a[ k1 ] = a[ k2 ];
+				a[ k2 ] = t;
+			}
+		});
+		assertArrayEquals( sorted, a );
+
+	}
+
+	@Test
+	public void testQuickSort() {
+		int[] s = new int[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		
+		java.util.Arrays.sort( s );
+		int[] sorted = s.clone();
+
+		final int[] a = s.clone();
+		Arrays.quickSort( 0, a.length, new AbstractIntComparator() {
+			@Override
+			public int compare( int k1, int k2 ) {
+				return a[ k1 ] - a[ k2 ]; 
+			}
+		}, new Swapper() {
+			@Override
+			public void swap( int k1, int k2 ) {
+				final int t = a[ k1 ];
+				a[ k1 ] = a[ k2 ];
+				a[ k2 ] = t;
+			}
+		});
+		assertArrayEquals( sorted, a );
+
+		Arrays.quickSort( 0, a.length, new AbstractIntComparator() {
+			@Override
+			public int compare( int k1, int k2 ) {
+				return a[ k1 ] - a[ k2 ]; 
+			}
+		}, new Swapper() {
+			@Override
+			public void swap( int k1, int k2 ) {
+				final int t = a[ k1 ];
+				a[ k1 ] = a[ k2 ];
+				a[ k2 ] = t;
+			}
+		});
+		assertArrayEquals( sorted, a );
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/BigArraysTest.java b/test/it/unimi/dsi/fastutil/BigArraysTest.java
new file mode 100644
index 0000000..4d90b0c
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/BigArraysTest.java
@@ -0,0 +1,84 @@
+package it.unimi.dsi.fastutil;
+
+import it.unimi.dsi.fastutil.BigArrays;
+import it.unimi.dsi.fastutil.BigSwapper;
+import it.unimi.dsi.fastutil.ints.IntBigArrays;
+import it.unimi.dsi.fastutil.longs.AbstractLongComparator;
+
+import java.util.Arrays;
+
+import static org.junit.Assert.*;
+import org.junit.Test;
+
+public class BigArraysTest {
+	
+	@Test
+	public void testMergeSort() {
+		int[] s = new int[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		final int[][] a = IntBigArrays.wrap( s.clone()  );
+		
+		Arrays.sort( s );
+		int[][] sorted = IntBigArrays.wrap( s.clone() );
+		BigArrays.mergeSort( 0, IntBigArrays.length( a ), new AbstractLongComparator() {
+			@Override
+			public int compare( long k1, long k2 ) {
+				return IntBigArrays.get( a, k1 ) - IntBigArrays.get( a, k2 ); 
+			}
+		}, new BigSwapper() {
+			@Override
+			public void swap( long k1, long k2 ) {
+				IntBigArrays.swap( a, k1, k2 );
+			}
+		});
+		assertArrayEquals( sorted, a );
+
+		BigArrays.mergeSort( 0, IntBigArrays.length( a ), new AbstractLongComparator() {
+			@Override
+			public int compare( long k1, long k2 ) {
+				return IntBigArrays.get( a, k1 ) - IntBigArrays.get( a, k2 ); 
+			}
+		}, new BigSwapper() {
+			@Override
+			public void swap( long k1, long k2 ) {
+				IntBigArrays.swap( a, k1, k2 );
+			}
+		});
+		assertArrayEquals( sorted, a );
+
+	}
+
+	@Test
+	public void testQuickSort() {
+		int[] s = new int[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		
+		Arrays.sort( s );
+		int[][] sorted = IntBigArrays.wrap( s.clone() );
+
+		final int[][] a = IntBigArrays.wrap( s.clone()  );
+		BigArrays.quickSort( 0, IntBigArrays.length( a ), new AbstractLongComparator() {
+			@Override
+			public int compare( long k1, long k2 ) {
+				return IntBigArrays.get( a, k1 ) - IntBigArrays.get( a, k2 ); 
+			}
+		}, new BigSwapper() {
+			@Override
+			public void swap( long k1, long k2 ) {
+				IntBigArrays.swap( a, k1, k2 );
+			}
+		});
+		assertArrayEquals( sorted, a );
+
+		BigArrays.quickSort( 0, IntBigArrays.length( a ), new AbstractLongComparator() {
+			@Override
+			public int compare( long k1, long k2 ) {
+				return IntBigArrays.get( a, k1 ) - IntBigArrays.get( a, k2 ); 
+			}
+		}, new BigSwapper() {
+			@Override
+			public void swap( long k1, long k2 ) {
+				IntBigArrays.swap( a, k1, k2 );
+			}
+		});
+		assertArrayEquals( sorted, a );
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/bytes/ByteArrayFrontCodedListTest.java b/test/it/unimi/dsi/fastutil/bytes/ByteArrayFrontCodedListTest.java
new file mode 100644
index 0000000..cc09e82
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/bytes/ByteArrayFrontCodedListTest.java
@@ -0,0 +1,125 @@
+package it.unimi.dsi.fastutil.bytes;
+
+import static org.junit.Assert.assertTrue;
+import it.unimi.dsi.fastutil.objects.ObjectListIterator;
+
+import java.io.IOException;
+
+import org.junit.Test;
+
+@SuppressWarnings({"rawtypes", "unchecked"})
+public class ByteArrayFrontCodedListTest {
+
+
+	private static java.util.Random r = new java.util.Random( 0 );
+
+	private static byte genKey() {
+		return (byte)( r.nextInt() );
+	}
+
+	private static boolean contentEquals( java.util.List x, java.util.List y ) {
+		if ( x.size() != y.size() ) return false;
+		for ( int i = 0; i < x.size(); i++ )
+			if ( !java.util.Arrays.equals( (byte[])x.get( i ), (byte[])y.get( i ) ) ) return false;
+		return true;
+	}
+
+	private static int l[];
+
+	private static byte[][] a;
+
+	private static void test( int n ) throws IOException, ClassNotFoundException {
+		l = new int[ n ];
+		a = new byte[ n ][];
+		for ( int i = 0; i < n; i++ )
+			l[ i ] = (int)( Math.abs( r.nextGaussian() ) * 32 );
+		for ( int i = 0; i < n; i++ )
+			a[ i ] = new byte[ l[ i ] ];
+		for ( int i = 0; i < n; i++ )
+			for ( int j = 0; j < l[ i ]; j++ )
+				a[ i ][ j ] = genKey();
+		ByteArrayFrontCodedList m = new ByteArrayFrontCodedList( it.unimi.dsi.fastutil.objects.ObjectIterators.wrap( a ), r.nextInt( 4 ) + 1 );
+		it.unimi.dsi.fastutil.objects.ObjectArrayList t = new it.unimi.dsi.fastutil.objects.ObjectArrayList( a );
+		// System.out.println(m);
+		// for( i = 0; i < t.size(); i++ )
+		// System.out.println(ARRAY_LIST.wrap((KEY_TYPE[])t.get(i)));
+		/* Now we check that m actually holds that data. */
+		assertTrue( "Error: m does not equal t at creation", contentEquals( m, t ) );
+		/* Now we check cloning. */
+		assertTrue( "Error: m does not equal m.clone()", contentEquals( m, m.clone() ) );
+		/* Now we play with iterators. */
+		{
+			ObjectListIterator i;
+			java.util.ListIterator j;
+			i = m.listIterator();
+			j = t.listIterator();
+			for ( int k = 0; k < 2 * n; k++ ) {
+				assertTrue( "Error: divergence in hasNext()", i.hasNext() == j.hasNext() );
+				assertTrue( "Error: divergence in hasPrevious()", i.hasPrevious() == j.hasPrevious() );
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					assertTrue( "Error: divergence in next()", java.util.Arrays.equals( (byte[])i.next(), (byte[])j.next() ) );
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					assertTrue( "Error: divergence in previous()", java.util.Arrays.equals( (byte[])i.previous(), (byte[])j.previous() ) );
+				}
+				assertTrue( "Error: divergence in nextIndex()", i.nextIndex() == j.nextIndex() );
+				assertTrue( "Error: divergence in previousIndex()", i.previousIndex() == j.previousIndex() );
+			}
+		}
+		{
+			int from = r.nextInt( m.size() + 1 );
+			ObjectListIterator i;
+			java.util.ListIterator j;
+			i = m.listIterator( from );
+			j = t.listIterator( from );
+			for ( int k = 0; k < 2 * n; k++ ) {
+				assertTrue( "Error: divergence in hasNext() (iterator with starting point " + from + ")", i.hasNext() == j.hasNext() );
+				assertTrue( "Error: divergence in hasPrevious() (iterator with starting point " + from + ")", i.hasPrevious() == j.hasPrevious() );
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					assertTrue( "Error: divergence in next() (iterator with starting point " + from + ")", java.util.Arrays.equals( (byte[])i.next(), (byte[])j.next() ) );
+					// System.err.println("Done next " + I + " " + J + "  " + badPrevious);
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					assertTrue( "Error: divergence in previous() (iterator with starting point " + from + ")", java.util.Arrays.equals( (byte[])i.previous(), (byte[])j.previous() ) );
+				}
+			}
+		}
+		java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" );
+		java.io.OutputStream os = new java.io.FileOutputStream( ff );
+		java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os );
+		oos.writeObject( m );
+		oos.close();
+		java.io.InputStream is = new java.io.FileInputStream( ff );
+		java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is );
+		m = (ByteArrayFrontCodedList)ois.readObject();
+		ois.close();
+		ff.delete();
+		assertTrue( "Error: m does not equal t after save/read", contentEquals( m, t ) );
+		return;
+	}
+
+	@Test
+	public void test1() throws IOException, ClassNotFoundException {
+		test( 1 );
+	}
+
+	@Test
+	public void test10() throws Exception, ClassNotFoundException {
+		test( 10 );
+	}
+
+	@Test
+	public void test100() throws IOException, ClassNotFoundException {
+		test( 100 );
+	}
+
+	@Test
+	public void test1000() throws IOException, ClassNotFoundException {
+		test( 1000 );
+	}
+
+	@Test
+	public void test10000() throws IOException, ClassNotFoundException {
+		test( 10000 );
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/bytes/ByteArraysTest.java b/test/it/unimi/dsi/fastutil/bytes/ByteArraysTest.java
new file mode 100644
index 0000000..1289ba9
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/bytes/ByteArraysTest.java
@@ -0,0 +1,141 @@
+package it.unimi.dsi.fastutil.bytes;
+
+import static org.junit.Assert.assertTrue;
+import java.util.Random;
+
+import org.junit.Test;
+
+public class ByteArraysTest {
+	
+	private static byte[] castIdentity( int n ) {
+		final byte[] a = new byte[ n ];
+		while( n-- != 0 ) a[ n ] = (byte)n;
+		return a;
+	}
+
+
+	@Test
+	public void testRadixSort1() {
+		byte[] t = { 2, 1, 0, 4 };
+		ByteArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+		
+		t = new byte[] { 2, (byte)-1, 0, (byte)-4 };
+		ByteArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+		
+		t = ByteArrays.shuffle( castIdentity( 100 ), new Random( 0 ) );
+		ByteArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new byte[ 100 ];
+		Random random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = (byte)random.nextInt();
+		ByteArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new byte[ 100000 ];
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = (byte)random.nextInt();
+		ByteArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new byte[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = (byte)random.nextInt();
+		ByteArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+	}
+
+	@Test
+	public void testRadixSort2() {
+		byte[][] d = new byte[ 2 ][];
+
+		d[ 0 ] = new byte[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)( 3 - i % 3 );
+		d[ 1 ] = ByteArrays.shuffle( castIdentity( 10 ), new Random( 0 ) );
+		ByteArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+		
+		d[ 0 ] = new byte[ 100000 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)( 100 - i % 100 );
+		d[ 1 ] = ByteArrays.shuffle( castIdentity( 100000 ), new Random( 6 ) );
+		ByteArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new byte[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)( i % 3 - 2 );
+		Random random = new Random( 0 );
+		d[ 1 ] = new byte[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = (byte)random.nextInt();
+		ByteArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+		
+		d[ 0 ] = new byte[ 100000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)random.nextInt();
+		d[ 1 ] = new byte[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = (byte)random.nextInt();
+		ByteArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new byte[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)random.nextInt();
+		d[ 1 ] = new byte[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = (byte)random.nextInt();
+		ByteArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+	}
+
+	@Test
+	public void testRadixSort() {
+		byte[][] t = { { 2, 1, 0, 4 } };
+		ByteArrays.radixSort( t );
+		for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] );
+		
+		t[ 0 ] = ByteArrays.shuffle( castIdentity( 100 ), new Random( 0 ) );
+		ByteArrays.radixSort( t );
+		for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] );
+
+		byte[][] d = new byte[ 2 ][];
+
+		d[ 0 ] = new byte[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)( 3 - i % 3 );
+		d[ 1 ] = ByteArrays.shuffle( castIdentity( 10 ), new Random( 0 ) );
+		ByteArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		
+		d[ 0 ] = new byte[ 100000 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)( 100 - i % 100 );
+		d[ 1 ] = ByteArrays.shuffle( castIdentity( 100000 ), new Random( 6 ) );
+		ByteArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new byte[ 10 ];
+		Random random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)random.nextInt();
+		d[ 1 ] = new byte[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = (byte)random.nextInt();
+		ByteArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		
+		d[ 0 ] = new byte[ 100000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)random.nextInt();
+		d[ 1 ] = new byte[ d[ 0 ].length ];
+		for( int i = d.length; i-- != 0; ) d[ 1 ][ i ] = (byte)random.nextInt();
+		ByteArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new byte[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (byte)random.nextInt();
+		d[ 1 ] = new byte[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = (byte)random.nextInt();
+		ByteArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/chars/CharArrayFrontCodedListTest.java b/test/it/unimi/dsi/fastutil/chars/CharArrayFrontCodedListTest.java
new file mode 100644
index 0000000..23b6be5
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/chars/CharArrayFrontCodedListTest.java
@@ -0,0 +1,126 @@
+package it.unimi.dsi.fastutil.chars;
+
+import static org.junit.Assert.assertTrue;
+import it.unimi.dsi.fastutil.objects.ObjectListIterator;
+
+import java.io.IOException;
+
+import org.junit.Test;
+
+@SuppressWarnings({ "rawtypes", "unchecked" })
+public class CharArrayFrontCodedListTest {
+
+
+	private static java.util.Random r = new java.util.Random( 0 );
+
+	private static char genKey() {
+		return (char)( r.nextInt() );
+	}
+
+	private static boolean contentEquals( java.util.List x, java.util.List y ) {
+		if ( x.size() != y.size() ) return false;
+		for ( int i = 0; i < x.size(); i++ )
+			if ( !java.util.Arrays.equals( (char[])x.get( i ), (char[])y.get( i ) ) ) return false;
+		return true;
+	}
+
+	private static int l[];
+
+	private static char[][] a;
+
+	private static void test( int n ) throws IOException, ClassNotFoundException {
+		l = new int[ n ];
+		a = new char[ n ][];
+		for ( int i = 0; i < n; i++ )
+			l[ i ] = (int)( Math.abs( r.nextGaussian() ) * 32 );
+		for ( int i = 0; i < n; i++ )
+			a[ i ] = new char[ l[ i ] ];
+		for ( int i = 0; i < n; i++ )
+			for ( int j = 0; j < l[ i ]; j++ )
+				a[ i ][ j ] = genKey();
+		CharArrayFrontCodedList m = new CharArrayFrontCodedList( it.unimi.dsi.fastutil.objects.ObjectIterators.wrap( a ), r.nextInt( 4 ) + 1 );
+		it.unimi.dsi.fastutil.objects.ObjectArrayList t = new it.unimi.dsi.fastutil.objects.ObjectArrayList( a );
+		// System.out.println(m);
+		// for( i = 0; i < t.size(); i++ )
+		// System.out.println(ARRAY_LIST.wrap((KEY_TYPE[])t.get(i)));
+		/* Now we check that m actually holds that data. */
+		assertTrue( "Error: m does not equal t at creation", contentEquals( m, t ) );
+		/* Now we check cloning. */
+		assertTrue( "Error: m does not equal m.clone()", contentEquals( m, m.clone() ) );
+		/* Now we play with iterators. */
+		{
+			ObjectListIterator i;
+			java.util.ListIterator j;
+			i = m.listIterator();
+			j = t.listIterator();
+			for ( int k = 0; k < 2 * n; k++ ) {
+				assertTrue( "Error: divergence in hasNext()", i.hasNext() == j.hasNext() );
+				assertTrue( "Error: divergence in hasPrevious()", i.hasPrevious() == j.hasPrevious() );
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					assertTrue( "Error: divergence in next()", java.util.Arrays.equals( (char[])i.next(), (char[])j.next() ) );
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					assertTrue( "Error: divergence in previous()", java.util.Arrays.equals( (char[])i.previous(), (char[])j.previous() ) );
+				}
+				assertTrue( "Error: divergence in nextIndex()", i.nextIndex() == j.nextIndex() );
+				assertTrue( "Error: divergence in previousIndex()", i.previousIndex() == j.previousIndex() );
+			}
+		}
+		{
+			int from = r.nextInt( m.size() + 1 );
+			ObjectListIterator i;
+			java.util.ListIterator j;
+			i = m.listIterator( from );
+			j = t.listIterator( from );
+			for ( int k = 0; k < 2 * n; k++ ) {
+				assertTrue( "Error: divergence in hasNext() (iterator with starting point " + from + ")", i.hasNext() == j.hasNext() );
+				assertTrue( "Error: divergence in hasPrevious() (iterator with starting point " + from + ")", i.hasPrevious() == j.hasPrevious() );
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					assertTrue( "Error: divergence in next() (iterator with starting point " + from + ")", java.util.Arrays.equals( (char[])i.next(), (char[])j.next() ) );
+					// System.err.println("Done next " + I + " " + J + "  " + badPrevious);
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					assertTrue( "Error: divergence in previous() (iterator with starting point " + from + ")", java.util.Arrays.equals( (char[])i.previous(), (char[])j.previous() ) );
+				}
+			}
+		}
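+		/* Now we check that serialization and deserialization preserve the content. */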
+		java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" );
+		java.io.OutputStream os = new java.io.FileOutputStream( ff );
+		java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os );
+		oos.writeObject( m );
+		oos.close();
+		java.io.InputStream is = new java.io.FileInputStream( ff );
+		java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is );
+		m = (CharArrayFrontCodedList)ois.readObject();
+		ois.close();
+		ff.delete();
+		assertTrue( "Error: m does not equal t after save/read", contentEquals( m, t ) );
+		return;
+	}
+
+
+	@Test
+	public void test1() throws IOException, ClassNotFoundException {
+		test( 1 );
+	}
+
+	@Test
+	public void test10() throws IOException, ClassNotFoundException {
+		test( 10 );
+	}
+
+	@Test
+	public void test100() throws IOException, ClassNotFoundException {
+		test( 100 );
+	}
+
+	@Test
+	public void test1000() throws IOException, ClassNotFoundException {
+		test( 1000 );
+	}
+
+	@Test
+	public void test10000() throws IOException, ClassNotFoundException {
+		test( 10000 );
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/chars/CharArraysTest.java b/test/it/unimi/dsi/fastutil/chars/CharArraysTest.java
new file mode 100644
index 0000000..f8a46f5
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/chars/CharArraysTest.java
@@ -0,0 +1,141 @@
+package it.unimi.dsi.fastutil.chars;
+
+import static org.junit.Assert.assertTrue;
+import java.util.Random;
+
+import org.junit.Test;
+
+public class CharArraysTest {
+	
+	private static char[] castIdentity( int n ) {
+		final char[] a = new char[ n ];
+		while( n-- != 0 ) a[ n ] = (char)n;
+		return a;
+	}
+
+
+	@Test
+	public void testRadixSort1() {
+		char[] t = { 2, 1, 0, 4 };
+		CharArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+		
+		t = new char[] { 2, (char)-1, 0, (char)-4 };
+		CharArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+		
+		t = CharArrays.shuffle( castIdentity( 100 ), new Random( 0 ) );
+		CharArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new char[ 100 ];
+		Random random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = (char)random.nextInt();
+		CharArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new char[ 100000 ];
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = (char)random.nextInt();
+		CharArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new char[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = (char)random.nextInt();
+		CharArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+	}
+
+	@Test
+	public void testRadixSort2() {
+		char[][] d = new char[ 2 ][];
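+		// d[ 0 ] holds the primary keys and d[ 1 ] the secondary ones: after sorting, ties on d[ 0 ] must be ordered by d[ 1 ].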
+
+		d[ 0 ] = new char[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)( 3 - i % 3 );
+		d[ 1 ] = CharArrays.shuffle( castIdentity( 10 ), new Random( 0 ) );
+		CharArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+		
+		d[ 0 ] = new char[ 100000 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)( 100 - i % 100 );
+		d[ 1 ] = CharArrays.shuffle( castIdentity( 100000 ), new Random( 6 ) );
+		CharArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new char[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)( i % 3 - 2 );
+		Random random = new Random( 0 );
+		d[ 1 ] = new char[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = (char)random.nextInt();
+		CharArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+		
+		d[ 0 ] = new char[ 100000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)random.nextInt();
+		d[ 1 ] = new char[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = (char)random.nextInt();
+		CharArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new char[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)random.nextInt();
+		d[ 1 ] = new char[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = (char)random.nextInt();
+		CharArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+	}
+
+	@Test
+	public void testRadixSort() {
+		char[][] t = { { 2, 1, 0, 4 } };
+		CharArrays.radixSort( t );
+		for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] );
+		
+		t[ 0 ] = CharArrays.shuffle( castIdentity( 100 ), new Random( 0 ) );
+		CharArrays.radixSort( t );
+		for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] );
+
+		char[][] d = new char[ 2 ][];
+
+		d[ 0 ] = new char[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)( 3 - i % 3 );
+		d[ 1 ] = CharArrays.shuffle( castIdentity( 10 ), new Random( 0 ) );
+		CharArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		
+		d[ 0 ] = new char[ 100000 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)( 100 - i % 100 );
+		d[ 1 ] = CharArrays.shuffle( castIdentity( 100000 ), new Random( 6 ) );
+		CharArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new char[ 10 ];
+		Random random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)random.nextInt();
+		d[ 1 ] = new char[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = (char)random.nextInt();
+		CharArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		
+		d[ 0 ] = new char[ 100000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)random.nextInt();
+		d[ 1 ] = new char[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = (char)random.nextInt();
+		CharArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new char[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (char)random.nextInt();
+		d[ 1 ] = new char[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = (char)random.nextInt();
+		CharArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/doubles/DoubleArraysTest.java b/test/it/unimi/dsi/fastutil/doubles/DoubleArraysTest.java
new file mode 100644
index 0000000..27b99b2
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/doubles/DoubleArraysTest.java
@@ -0,0 +1,540 @@
+package it.unimi.dsi.fastutil.doubles;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import it.unimi.dsi.fastutil.ints.IntArrays;
+
+import java.util.Random;
+
+import org.junit.Test;
+
+public class DoubleArraysTest {
+	
+	private static double[] identity( int n ) {
+		final double[] a = new double[ n ];
+		while( n-- != 0 ) a[ n ] = n;
+		return a;
+	}
+
+	private static int[] identityInt( int n ) {
+		final int[] a = new int[ n ];
+		while( n-- != 0 ) a[ n ] = n;
+		return a;
+	}
+
+
+	@Test
+	public void testRadixSort1() {
+		double[] t = { 2, 1, 0, 4 };
+		DoubleArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+		
+		t = new double[] { 2, -1, 0, -4 };
+		DoubleArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+		
+		t = DoubleArrays.shuffle( identity( 100 ), new Random( 0 ) );
+		DoubleArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new double[ 100 ];
+		Random random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		DoubleArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new double[ 100000 ];
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		DoubleArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new double[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		DoubleArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+	}
+
+	@Test
+	public void testRadixSort2() {
+		double[][] d = new double[ 2 ][];
+
+		d[ 0 ] = new double[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3;
+		d[ 1 ] = DoubleArrays.shuffle( identity( 10 ), new Random( 0 ) );
+		DoubleArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+		
+		d[ 0 ] = new double[ 100000 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100;
+		d[ 1 ] = DoubleArrays.shuffle( identity( 100000 ), new Random( 6 ) );
+		DoubleArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new double[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = i % 3 - 2;
+		Random random = new Random( 0 );
+		d[ 1 ] = new double[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt();
+		DoubleArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+		
+		d[ 0 ] = new double[ 100000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt();
+		d[ 1 ] = new double[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt();
+		DoubleArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new double[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt();
+		d[ 1 ] = new double[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt();
+		DoubleArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+	}
+
+	@Test
+	public void testRadixSort() {
+		double[][] t = { { 2, 1, 0, 4 } };
+		DoubleArrays.radixSort( t );
+		for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] );
+		
+		t[ 0 ] = DoubleArrays.shuffle( identity( 100 ), new Random( 0 ) );
+		DoubleArrays.radixSort( t );
+		for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] );
+
+		double[][] d = new double[ 2 ][];
+
+		d[ 0 ] = new double[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3;
+		d[ 1 ] = DoubleArrays.shuffle( identity( 10 ), new Random( 0 ) );
+		DoubleArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		
+		d[ 0 ] = new double[ 100000 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100;
+		d[ 1 ] = DoubleArrays.shuffle( identity( 100000 ), new Random( 6 ) );
+		DoubleArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new double[ 10 ];
+		Random random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt();
+		d[ 1 ] = new double[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt();
+		DoubleArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		
+		d[ 0 ] = new double[ 100000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt();
+		d[ 1 ] = new double[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt();
+		DoubleArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new double[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt();
+		d[ 1 ] = new double[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt();
+		DoubleArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+	}
+
+	@Test
+	public void testRadixSortIndirectStable() {
+		double[] d = { 2, 1, 0, 4 };
+		int[] perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		DoubleArrays.radixSortIndirect( perm, d, true );
+		for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] );
+		
+		d = new double[ d.length ];
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		DoubleArrays.radixSortIndirect( perm, d, true );
+		for( int i = d.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+		
+		d = new double[] { 2, -1, 0, -4 };
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		DoubleArrays.radixSortIndirect( perm, d, true );
+		for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] );
+		
+		d = DoubleArrays.shuffle( identity( 100 ), new Random( 0 ) );
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		DoubleArrays.radixSortIndirect( perm, d, true );
+		for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] );
+
+		d = new double[ 100 ];
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		Random random = new Random( 0 );
+		for( int i = d.length; i-- != 0; ) d[ i ] = random.nextInt();
+		DoubleArrays.radixSortIndirect( perm, d, true );
+		for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] );
+
+		d = new double[ d.length ];
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		DoubleArrays.radixSortIndirect( perm, d, true );
+		for( int i = d.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+		
+		d = new double[ d.length ];
+		for( int i = 0; i < d.length; i++ ) d[ i ] = random.nextInt( 4 ); 
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		DoubleArrays.radixSortIndirect( perm, d, true );
+		for( int i = d.length - 1; i-- != 0; ) if ( d[ perm[ i ] ] == d[ perm[ i + 1 ] ] ) assertTrue( perm[ i ] < perm[ i + 1 ] );
+
+		d = new double[ 100 ];
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		random = new Random( 0 );
+		for( int i = d.length; i-- != 0; ) d[ i ] = random.nextInt();
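+		// Sort only the range [ 10, 90 ) and check that perm is left untouched outside it.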
+		DoubleArrays.radixSortIndirect( perm, d, 10, 90, true );
+		for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] );
+		for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] );
+		for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] );
+
+		d = new double[ 100000 ];
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		random = new Random( 0 );
+		for( int i = d.length; i-- != 0; ) d[ i ] = random.nextInt();
+		DoubleArrays.radixSortIndirect( perm, d, true );
+		for( int i = d.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] );
+
+		IntArrays.shuffle( perm, new Random( 0 ) );
+		DoubleArrays.radixSortIndirect( perm, d, true );
+		for( int i = d.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] );
+
+		d = new double[ 10000000 ];
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		random = new Random( 0 );
+		for( int i = d.length; i-- != 0; ) d[ i ] = random.nextInt();
+		DoubleArrays.radixSortIndirect( perm, d, true );
+		for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] );
+
+		d = new double[ d.length ];
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		DoubleArrays.radixSortIndirect( perm, d, true );
+		for( int i = d.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+
+		d = new double[ d.length ];
+		for( int i = 0; i < d.length; i++ ) d[ i ] = random.nextInt( 8 ); 
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		DoubleArrays.radixSortIndirect( perm, d, true );
+		for( int i = d.length - 1; i-- != 0; ) if ( d[ perm[ i ] ] == d[ perm[ i + 1 ] ] ) assertTrue( perm[ i ] < perm[ i + 1 ] );
+	}
+	
+	@Test
+	public void testRadixSortIndirectUnstable() {
+		double[] d = { 2, 1, 0, 4 };
+		int[] perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		DoubleArrays.radixSortIndirect( perm, d, false );
+		for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] );
+		
+		d = new double[ d.length ];
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		DoubleArrays.radixSortIndirect( perm, d, false );
+		for( int i = d.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+		
+		d = new double[] { 2, -1, 0, -4 };
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		DoubleArrays.radixSortIndirect( perm, d, false );
+		for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] );
+		
+		d = DoubleArrays.shuffle( identity( 100 ), new Random( 0 ) );
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		DoubleArrays.radixSortIndirect( perm, d, false );
+		for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] );
+
+		d = new double[ 100 ];
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		Random random = new Random( 0 );
+		for( int i = d.length; i-- != 0; ) d[ i ] = random.nextInt();
+		DoubleArrays.radixSortIndirect( perm, d, false );
+		for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] );
+
+		d = new double[ 100 ];
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		random = new Random( 0 );
+		for( int i = d.length; i-- != 0; ) d[ i ] = random.nextInt();
+		DoubleArrays.radixSortIndirect( perm, d, 10, 90, false );
+		for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] );
+		for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] );
+		for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] );
+
+		d = new double[ 100000 ];
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		random = new Random( 0 );
+		for( int i = d.length; i-- != 0; ) d[ i ] = random.nextInt();
+		DoubleArrays.radixSortIndirect( perm, d, false );
+		for( int i = d.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] );
+
+		IntArrays.shuffle( perm, new Random( 0 ) );
+		DoubleArrays.radixSortIndirect( perm, d, false );
+		for( int i = d.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] );
+
+		d = new double[ 10000000 ];
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		random = new Random( 0 );
+		for( int i = d.length; i-- != 0; ) d[ i ] = random.nextInt();
+		DoubleArrays.radixSortIndirect( perm, d, false );
+		for( int i = d.length - 1; i-- != 0; ) assertTrue( d[ perm[ i ] ] <= d[ perm[ i + 1 ] ] );
+
+		d = new double[ d.length ];
+		perm = it.unimi.dsi.fastutil.ints.IntArraysTest.identity( d.length );
+		DoubleArrays.radixSortIndirect( perm, d, false );
+		for( int i = d.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+	}
+	
+	@Test
+	public void testRadixSort2IndirectStable() {
+		double[] t = { 2, 1, 0, 4 };
+		double[] u = { 3, 2, 1, 0 };
+		int[] perm = identityInt( t.length );
+		DoubleArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+		
+		t = new double[ t.length ];
+		perm = identityInt( t.length );
+		DoubleArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+		
+		t = new double[ t.length ];
+		perm = identityInt( t.length );
+		DoubleArrays.radixSortIndirect( perm, t, t, true );
+		for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+		
+		t = DoubleArrays.shuffle( identity( 100 ), new Random( 0 ) );
+		u = DoubleArrays.shuffle( identity( 100 ), new Random( 1 ) );
+		perm = identityInt( t.length );
+		DoubleArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		t = new double[ 100 ];
+		u = new double[ 100 ];
+		perm = identityInt( t.length );
+		Random random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		for( int i = t.length; i-- != 0; ) u[ i ] = random.nextInt();
+		DoubleArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		t = new double[ t.length ];
+		u = new double[ t.length ];
+		perm = identityInt( t.length );
+		DoubleArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+		
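+		// With only a few distinct values, stability requires equal pairs to keep their original order in perm.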
+		for( int i = 0; i < u.length; i++ ) t[ i ] = random.nextInt( 4 ); 
+		for( int i = 0; i < u.length; i++ ) u[ i ] = random.nextInt( 4 ); 
+		perm = identityInt( t.length );
+		DoubleArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) if ( t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] == u[ perm[ i + 1 ] ] ) assertTrue( perm[ i ] < perm[ i + 1 ] );
+
+		t = new double[ 100 ];
+		u = new double[ 100 ];
+		perm = identityInt( t.length );
+		random = new Random( 0 );
+		for( int i = u.length; i-- != 0; ) u[ i ] = random.nextInt();
+		DoubleArrays.radixSortIndirect( perm, t, u, 10, 90, true );
+		for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+		for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] );
+		for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] );
+
+		t = new double[ 100000 ];
+		u = new double[ 100000 ];
+		perm = identityInt( t.length );
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		DoubleArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		IntArrays.shuffle( perm, new Random( 0 ) );
+		DoubleArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		t = new double[ 10000000 ];
+		u = new double[ 10000000 ];
+		perm = identityInt( t.length );
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		for( int i = t.length; i-- != 0; ) u[ i ] = random.nextInt();
+		DoubleArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		t = new double[ t.length ];
+		u = new double[ t.length ];
+		perm = identityInt( t.length );
+		DoubleArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+
+		t = new double[ t.length ];
+		for( int i = 0; i < t.length; i++ ) t[ i ] = random.nextInt( 8 ); 
+		for( int i = 0; i < t.length; i++ ) u[ i ] = random.nextInt( 8 ); 
+		perm = identityInt( t.length );
+		DoubleArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) if ( t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] == u[ perm[ i + 1 ] ] ) assertTrue( perm[ i ] < perm[ i + 1 ] );
+	}
+	
+	@Test
+	public void testRadixSort2IndirectUnstable() {
+		double[] t = { 2, 1, 0, 4 };
+		double[] u = { 3, 2, 1, 0 };
+		int[] perm = identityInt( t.length );
+		DoubleArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+		
+		t = new double[ t.length ];
+		perm = identityInt( t.length );
+		DoubleArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+		
+		t = new double[ t.length ];
+		perm = identityInt( t.length );
+		DoubleArrays.radixSortIndirect( perm, t, t, false );
+		for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+		
+		t = DoubleArrays.shuffle( identity( 100 ), new Random( 0 ) );
+		u = DoubleArrays.shuffle( identity( 100 ), new Random( 1 ) );
+		perm = identityInt( t.length );
+		DoubleArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		t = new double[ 100 ];
+		u = new double[ 100 ];
+		perm = identityInt( t.length );
+		Random random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		for( int i = t.length; i-- != 0; ) u[ i ] = random.nextInt();
+		DoubleArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		t = new double[ t.length ];
+		u = new double[ t.length ];
+		perm = identityInt( t.length );
+		DoubleArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+		
+		for( int i = 0; i < u.length; i++ ) t[ i ] = random.nextInt( 4 ); 
+		for( int i = 0; i < u.length; i++ ) u[ i ] = random.nextInt( 4 ); 
+		perm = identityInt( t.length );
+		DoubleArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		t = new double[ 100 ];
+		u = new double[ 100 ];
+		perm = identityInt( t.length );
+		random = new Random( 0 );
+		for( int i = u.length; i-- != 0; ) u[ i ] = random.nextInt();
+		DoubleArrays.radixSortIndirect( perm, t, u, 10, 90, false );
+		for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+		for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] );
+		for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] );
+
+		t = new double[ 100000 ];
+		u = new double[ 100000 ];
+		perm = identityInt( t.length );
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		DoubleArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		IntArrays.shuffle( perm, new Random( 0 ) );
+		DoubleArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		t = new double[ 10000000 ];
+		u = new double[ 10000000 ];
+		perm = identityInt( t.length );
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		for( int i = t.length; i-- != 0; ) u[ i ] = random.nextInt();
+		DoubleArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		t = new double[ t.length ];
+		u = new double[ t.length ];
+		perm = identityInt( t.length );
+		DoubleArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+
+		t = new double[ t.length ];
+		for( int i = 0; i < t.length; i++ ) t[ i ] = random.nextInt( 8 ); 
+		for( int i = 0; i < t.length; i++ ) u[ i ] = random.nextInt( 8 ); 
+		perm = identityInt( t.length );
+		DoubleArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( i + " " + t[ perm[ i ] ] + " " + t[ perm[ i + 1 ] ] + " " + u[ perm[ i ] ] + " " + u[ perm[ i + 1 ] ] + " " + perm[ i ] + " " + perm[ i + 1 ], t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+	}
+
+
+
+
+	@Test
+	public void testMergeSortNaNs() {
+		final double[] t = { Double.NaN, 1, 5, 2, 1, 0, 9, 1, Double.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
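+		// Double.compare() treats NaN as larger than every other value, so a correctly sorted array is nondecreasing under that total order.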
+		for( int to = 1; to < t.length; to++ )
+			for( int from = 0; from < to; from++ ) {
+				final double[] a = t.clone();
+				DoubleArrays.mergeSort( a, from, to );
+				for( int i = to - 1; i-- != from; ) assertTrue( Double.compare( a[ i ], a[ i + 1 ] ) <= 0 );
+			}
+		
+	}
+
+
+	@Test
+	public void testRadixSortNaNs() {
+		final double[] t = { Double.NaN, 1, 5, 2, 1, 0, 9, 1, Double.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		for( int to = 1; to < t.length; to++ )
+			for( int from = 0; from < to; from++ ) {
+				final double[] a = t.clone();
+				DoubleArrays.radixSort( a, from, to );
+				for( int i = to - 1; i-- != from; ) assertTrue( Double.compare( a[ i ], a[ i + 1 ] ) <= 0 );
+			}
+		
+	}
+
+	@Test
+	public void testRadixSortIndirectNaNs() {
+		final double[] t = { Double.NaN, 1, 5, 2, 1, 0, 9, 1, Double.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		for( int to = 1; to < t.length; to++ )
+			for( int from = 0; from < to; from++ ) {
+				final int perm[] = new int[ t.length ];
+				for( int i = perm.length; i-- != 0; ) perm[ i ] = i;
+				DoubleArrays.radixSortIndirect( perm, t, from, to, true );
+				for( int i = to - 1; i-- != from; ) assertTrue( Double.compare( t[ perm[ i ] ], t[ perm[ i + 1 ] ] ) <= 0 );
+			}
+		
+	}
+
+	@Test
+	public void testRadixSortIndirect2NaNs() {
+		final double[] t = { Double.NaN, 1, 5, 2, 1, 0, 9, 1, Double.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		for( int to = 1; to < t.length; to++ )
+			for( int from = 0; from < to; from++ ) {
+				final int perm[] = new int[ t.length ];
+				for( int i = perm.length; i-- != 0; ) perm[ i ] = i;
+				DoubleArrays.radixSortIndirect( perm, t, t, from, to, true );
+				for( int i = to - 1; i-- != from; ) assertTrue( Double.compare( t[ perm[ i ] ], t[ perm[ i + 1 ] ] ) <= 0 );
+			}
+		
+	}
+
+	@SuppressWarnings("deprecation")
+	@Test
+	public void testQuickSortNaNs() {
+		final double[] t = { Double.NaN, 1, 5, 2, 1, 0, 9, 1, Double.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		for( int to = 1; to < t.length; to++ )
+			for( int from = 0; from < to; from++ ) {
+				final double[] a = t.clone();
+				DoubleArrays.quickSort( a, from, to );
+				for( int i = to - 1; i-- != from; ) assertTrue( Double.compare( a[ i ], a[ i + 1 ] ) <= 0 );
+			}
+		
+	}
+
+
+}
diff --git a/test/it/unimi/dsi/fastutil/doubles/DoubleBigArraysTest.java b/test/it/unimi/dsi/fastutil/doubles/DoubleBigArraysTest.java
new file mode 100644
index 0000000..3367f8a
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/doubles/DoubleBigArraysTest.java
@@ -0,0 +1,289 @@
+package it.unimi.dsi.fastutil.doubles;
+
+import static org.junit.Assert.*;
+import static it.unimi.dsi.fastutil.doubles.DoubleBigArrays.set;
+import static it.unimi.dsi.fastutil.doubles.DoubleBigArrays.get;
+
+import it.unimi.dsi.fastutil.ints.IntBigArrays;
+
+import java.util.Arrays;
+import java.util.Random;
+
+import org.junit.Test;
+
+public class DoubleBigArraysTest {
+
+	
+	public static double[][] identity( final int n ) {
+		final double[][] perm = DoubleBigArrays.newBigArray( n );
+		for( int i = n; i-- != 0; ) DoubleBigArrays.set( perm, i , i );
+		return perm;
+	}
+
+	@Test
+	public void testQuickSort() {
+		double[] s = new double[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		
+		Arrays.sort( s );
+		double[][] sorted = DoubleBigArrays.wrap( s.clone() );
+
+		double[][] a = DoubleBigArrays.wrap( s.clone()  );
+
+		DoubleBigArrays.quickSort( a );
+		assertArrayEquals( sorted, a );
+
+		DoubleBigArrays.quickSort( a );
+		assertArrayEquals( sorted, a );
+		
+		a = DoubleBigArrays.wrap( s.clone()  );
+		
+		DoubleBigArrays.quickSort( a, DoubleComparators.NATURAL_COMPARATOR );
+		assertArrayEquals( sorted, a );
+
+		DoubleBigArrays.quickSort( a, DoubleComparators.NATURAL_COMPARATOR );
+		assertArrayEquals( sorted, a );
+		
+	}
+
+	private void testCopy( int n ) {
+		double[][] a = DoubleBigArrays.newBigArray( n );
+		for ( int i = 0; i < n; i++ ) set( a, i, i );
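+		// Overlapping copies: shift the content right by one, then (after refilling) shift it left by one.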
+		DoubleBigArrays.copy( a, 0, a, 1, n - 2 );
+		assertEquals( 0, a[ 0 ][ 0 ], 0 );
+		for ( int i = 0; i < n - 2; i++ ) assertEquals( i,  get( a, i + 1 ), 0 );
+		for ( int i = 0; i < n; i++ ) set( a, i, i );
+		DoubleBigArrays.copy( a, 1, a, 0, n - 1 );
+		for ( int i = 0; i < n - 1; i++ ) assertEquals( i + 1, get( a, i ) ,0 );
+		for ( int i = 0; i < n; i++ ) set( a, i, i );
+		double[] b = new double[ n ];
+		for ( int i = 0; i < n; i++ ) b[ i ] = i;
+		assertArrayEquals( a, DoubleBigArrays.wrap( b ) );
+	}
+	
+	@Test
+	public void testCopy10() {
+		testCopy( 10 );
+	}
+
+	@Test
+	public void testCopy1000() {
+		testCopy( 1000 );
+	}
+
+	@Test
+	public void testCopy1000000() {
+		testCopy( 1000000 );
+	}
+
+	@Test
+	public void testBinarySearch() {
+		double[] a = new double[] { 25, 32, 1, 3, 2, 0, 40, 7, 13, 12, 11, 10, -1, -6, -18, 2000 };
+		
+		Arrays.sort( a );
+		double[][] b = DoubleBigArrays.wrap( a.clone() );
+
+		for( int i = -1; i < 20; i++ ) {
+			assertEquals( "" + i, Arrays.binarySearch( a, i ), DoubleBigArrays.binarySearch( b, i ) );
+			assertEquals( "" + i, Arrays.binarySearch( a, i ), DoubleBigArrays.binarySearch( b, i, DoubleComparators.NATURAL_COMPARATOR ) );
+		}
+	
+		for( int i = -1; i < 20; i++ ) {
+			assertEquals( Arrays.binarySearch( a, 5, 13, i ), DoubleBigArrays.binarySearch( b, 5, 13, i ) );
+			assertEquals( Arrays.binarySearch( a, 5, 13, i ), DoubleBigArrays.binarySearch( b, 5, 13, i, DoubleComparators.NATURAL_COMPARATOR ) );
+		}
+	}
+
+	@Test
+	public void testTrim() {
+		double[] a = new double[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		double[][] b = DoubleBigArrays.wrap( a.clone() );
+
+		for( int i = a.length; i-- != 0; ) {
+			double[][] t = DoubleBigArrays.trim( b, i );
+			final long l = DoubleBigArrays.length( t );
+			assertEquals( i, l );
+			for( int p = 0; p < l; p++ ) assertEquals( a[ p ], DoubleBigArrays.get( t, p ), 0 );
+			
+		}
+	}
+
+	@Test
+	public void testEquals() {
+		double[] a = new double[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		double[][] b = DoubleBigArrays.wrap( a.clone() );
+		double[][] c = DoubleBigArrays.wrap( a.clone() );
+
+		assertTrue( DoubleBigArrays.equals( b, c ) );
+		b[ 0 ][ 0 ] = 0;
+		assertFalse( DoubleBigArrays.equals( b, c ) );
+	}
+
+	@Test
+	public void testRadixSort1() {
+		double[][] t = DoubleBigArrays.wrap( new double[] { 2, 1, 0, 4 } );
+		DoubleBigArrays.radixSort( t );
+		for( long i = DoubleBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( DoubleBigArrays.get( t, i ) <= DoubleBigArrays.get( t, i + 1 ) );
+		
+		t = DoubleBigArrays.wrap( new double[] { 2, -1, 0, -4 } );
+		DoubleBigArrays.radixSort( t );
+		for( long i = DoubleBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( DoubleBigArrays.get( t, i ) <= DoubleBigArrays.get( t, i + 1 ) );
+		
+		t = DoubleBigArrays.shuffle( identity( 100 ), new Random( 0 ) );
+		DoubleBigArrays.radixSort( t );
+		for( long i = DoubleBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( DoubleBigArrays.get( t, i ) <= DoubleBigArrays.get( t, i + 1 ) );
+
+		t = DoubleBigArrays.newBigArray( 100 );
+		Random random = new Random( 0 );
+		for( long i = DoubleBigArrays.length( t ); i-- != 0; ) DoubleBigArrays.set( t, i, random.nextInt() );
+		DoubleBigArrays.radixSort( t );
+		for( long i = DoubleBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( DoubleBigArrays.get( t, i ) <= DoubleBigArrays.get( t, i + 1 ) );
+
+		t = DoubleBigArrays.newBigArray( 100000 );
+		random = new Random( 0 );
+		for( long i = DoubleBigArrays.length( t ); i-- != 0; ) DoubleBigArrays.set( t, i, random.nextInt() );
+		DoubleBigArrays.radixSort( t );
+		for( long i = DoubleBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( DoubleBigArrays.get( t, i ) <= DoubleBigArrays.get( t, i + 1 ) );
+		for( long i = 100; i-- != 10; ) DoubleBigArrays.set( t, i, random.nextInt() );
+		DoubleBigArrays.radixSort( t, 10, 100 );
+		for( long i = 99; i-- != 10; ) assertTrue( DoubleBigArrays.get( t, i ) <= DoubleBigArrays.get( t, i + 1 ) );
+
+		t = DoubleBigArrays.newBigArray( 1000000 );
+		random = new Random( 0 );
+		for( long i = DoubleBigArrays.length( t ); i-- != 0; ) DoubleBigArrays.set( t, i, random.nextInt() );
+		DoubleBigArrays.radixSort( t );
+		for( long i = DoubleBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( DoubleBigArrays.get( t, i ) <= DoubleBigArrays.get( t, i + 1 ) );
+	}
+
+	@Test
+	public void testRadixSort2() {
+		double d[][], e[][];
+		d = DoubleBigArrays.newBigArray( 10 );
+		for( long i = DoubleBigArrays.length( d ); i-- != 0; ) DoubleBigArrays.set( d, i, (int)( 3 - i % 3 ) );
+		e = DoubleBigArrays.shuffle( identity( 10 ), new Random( 0 ) );
+		DoubleBigArrays.radixSort( d, e );
+		for( long i = DoubleBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + DoubleBigArrays.get( d, i ) + ", " + DoubleBigArrays.get( e, i ) + ">, <" + DoubleBigArrays.get( d, i + 1 ) + ", " +  DoubleBigArrays.get( e, i + 1 ) + ">", DoubleBigArrays.get( d, i ) < DoubleBigArrays.get( d, i + 1 ) || DoubleBigArrays.get( d, i ) == DoubleBigArrays.get( d, i + 1 ) && DoubleBigArrays.get( e, i ) <= DoubleBigArrays.get( e, i + 1 ) );
+		
+		d = DoubleBigArrays.newBigArray( 100000 );
+		for( long i = DoubleBigArrays.length( d ); i-- != 0; ) DoubleBigArrays.set( d, i, (int)( 100 - i % 100 ) );
+		e = DoubleBigArrays.shuffle( identity( 100000 ), new Random( 6 ) );
+		DoubleBigArrays.radixSort( d, e );
+		for( long i = DoubleBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + DoubleBigArrays.get( d, i ) + ", " + DoubleBigArrays.get( e, i ) + ">, <" + DoubleBigArrays.get( d, i + 1 ) + ", " +  DoubleBigArrays.get( e, i + 1 ) + ">", DoubleBigArrays.get( d, i ) < DoubleBigArrays.get( d, i + 1 ) || DoubleBigArrays.get( d, i ) == DoubleBigArrays.get( d, i + 1 ) && DoubleBigArrays.get( e, i ) <= DoubleBigArrays.get( e, i + 1 ) );
+
+		d = DoubleBigArrays.newBigArray( 10 );
+		for( long i = DoubleBigArrays.length( d ); i-- != 0; ) DoubleBigArrays.set( d, i, (int)( i % 3 - 2 ) );
+		Random random = new Random( 0 );
+		e = DoubleBigArrays.newBigArray( DoubleBigArrays.length(  d ) );
+		for( long i = DoubleBigArrays.length( d ); i-- != 0; ) DoubleBigArrays.set( e, i, random.nextInt() );
+		DoubleBigArrays.radixSort( d, e );
+		for( long i = DoubleBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + DoubleBigArrays.get( d, i ) + ", " + DoubleBigArrays.get( e, i ) + ">, <" + DoubleBigArrays.get( d, i + 1 ) + ", " +  DoubleBigArrays.get( e, i + 1 ) + ">", DoubleBigArrays.get( d, i ) < DoubleBigArrays.get( d, i + 1 ) || DoubleBigArrays.get( d, i ) == DoubleBigArrays.get( d, i + 1 ) && DoubleBigArrays.get( e, i ) <= DoubleBigArrays.get( e, i + 1 ) );
+		
+		d = DoubleBigArrays.newBigArray( 100000 );
+		random = new Random( 0 );
+		for( long i = DoubleBigArrays.length( d ); i-- != 0; ) DoubleBigArrays.set( d, i, random.nextInt() );
+		e = DoubleBigArrays.newBigArray( DoubleBigArrays.length(  d ) );
+		for( long i = DoubleBigArrays.length( d ); i-- != 0; ) DoubleBigArrays.set( e, i, random.nextInt() );
+		DoubleBigArrays.radixSort( d, e );
+		for( long i = DoubleBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + DoubleBigArrays.get( d, i ) + ", " + DoubleBigArrays.get( e, i ) + ">, <" + DoubleBigArrays.get( d, i + 1 ) + ", " +  DoubleBigArrays.get( e, i + 1 ) + ">", DoubleBigArrays.get( d, i ) < DoubleBigArrays.get( d, i + 1 ) || DoubleBigArrays.get( d, i ) == DoubleBigArrays.get( d, i + 1 ) && DoubleBigArrays.get( e, i ) <= DoubleBigArrays.get( e, i + 1 ) );
+		for( long i = 100; i-- != 10; ) DoubleBigArrays.set( e, i, random.nextInt() );
+		DoubleBigArrays.radixSort( d, e, 10, 100 );
+		for( long i = 99; i-- != 10; ) assertTrue( Long.toString( i ) + ": <" + DoubleBigArrays.get( d, i ) + ", " + DoubleBigArrays.get( e, i ) + ">, <" + DoubleBigArrays.get( d, i + 1 ) + ", " +  DoubleBigArrays.get( e, i + 1 ) + ">", DoubleBigArrays.get( d, i ) < DoubleBigArrays.get( d, i + 1 ) || DoubleBigArrays.get( d, i ) == DoubleBigArrays.get( d, i + 1 ) && DoubleBigArrays.get( e, i ) <= DoubleBigArrays.get( e, i + 1 ) );
+
+		d = DoubleBigArrays.newBigArray( 1000000 );
+		random = new Random( 0 );
+		for( long i = DoubleBigArrays.length( d ); i-- != 0; ) DoubleBigArrays.set( d, i, random.nextInt() );
+		e = DoubleBigArrays.newBigArray( DoubleBigArrays.length(  d ) );
+		for( long i = DoubleBigArrays.length( d ); i-- != 0; ) DoubleBigArrays.set( e, i, random.nextInt() );
+		DoubleBigArrays.radixSort( d, e );
+		for( long i = DoubleBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + DoubleBigArrays.get( d, i ) + ", " + DoubleBigArrays.get( e, i ) + ">, <" + DoubleBigArrays.get( d, i + 1 ) + ", " +  DoubleBigArrays.get( e, i + 1 ) + ">", DoubleBigArrays.get( d, i ) < DoubleBigArrays.get( d, i + 1 ) || DoubleBigArrays.get( d, i ) == DoubleBigArrays.get( d, i + 1 ) && DoubleBigArrays.get( e, i ) <= DoubleBigArrays.get( e, i + 1 ) );
+	}
+
+
+	@Test
+	public void testShuffle() {
+		double[] a = new double[ 100 ];
+		for( int i = a.length; i-- != 0; ) a[ i ] = i;
+		double[][] b = DoubleBigArrays.wrap( a );
+		DoubleBigArrays.shuffle( b, new Random() );
+		boolean[] c = new boolean[ a.length ];
+		for( long i = DoubleBigArrays.length( b ); i-- != 0; ) {
+			assertFalse( c[ (int)DoubleBigArrays.get( b, i ) ] );
+			c[ (int)DoubleBigArrays.get( b, i ) ] = true;
+		}
+	}
+
+	@Test
+	public void testShuffleFragment() {
+		double[] a = new double[ 100 ];
+		for( int i = a.length; i-- != 0; ) a[ i ] = -1;
+		for( int i = 10; i < 30; i++ ) a[ i ] = i - 10;
+		double[][] b = DoubleBigArrays.wrap( a );
+		DoubleBigArrays.shuffle( b, 10, 30, new Random() );
+		boolean[] c = new boolean[ 20 ];
+		for( int i = 20; i-- != 0; ) {
+			assertFalse( c[ (int)DoubleBigArrays.get( b, i + 10 ) ] );
+			c[ (int)DoubleBigArrays.get( b, i + 10 ) ] = true;
+		}
+	}
+
+	@Test
+	public void testBinarySearchLargeKey() {
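+		// Searching for a key larger than every element must not throw.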
+		final double[][] a = DoubleBigArrays.wrap( new double[] { 1, 2, 3 } );
+		DoubleBigArrays.binarySearch( a, 4 );
+	}
+
+	@Test
+	public void testMergeSortNaNs() {
+		final double[] t = { Double.NaN, 1, 5, 2, 1, 0, 9, 1, Double.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		for( int to = 1; to < t.length; to++ )
+			for( int from = 0; from < to; from++ ) {
+				final double[] a = t.clone();
+				DoubleArrays.mergeSort( a, from, to );
+				for( int i = to - 1; i-- != from; ) assertTrue( Double.compare( a[ i ], a[ i + 1 ] ) <= 0 );
+			}
+		
+	}
+
+
+	@Test
+	public void testRadixSortNaNs() {
+		final double[] t = { Double.NaN, 1, 5, 2, 1, 0, 9, 1, Double.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		for( int to = 1; to < t.length; to++ )
+			for( int from = 0; from < to; from++ ) {
+				final double[] a = t.clone();
+				DoubleBigArrays.radixSort( DoubleBigArrays.wrap( a ), from, to );
+				for( int i = to - 1; i-- != from; ) assertTrue( Double.compare( a[ i ], a[ i + 1 ] ) <= 0 );
+			}
+		
+	}
+
+	@Test
+	public void testRadixSort2NaNs() {
+		final double[] t = { Double.NaN, 1, 5, 2, 1, 0, 9, 1, Double.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		for( int to = 1; to < t.length; to++ )
+			for( int from = 0; from < to; from++ ) {
+				final double[] a = t.clone();
+				final double[] b = t.clone();
+				DoubleBigArrays.radixSort( DoubleBigArrays.wrap( a ), DoubleBigArrays.wrap( b ), from, to );
+				for( int i = to - 1; i-- != from; ) {
+					assertTrue( Double.compare( a[ i ], a[ i + 1 ] ) <= 0 );
+					assertTrue( Double.compare( b[ i ], b[ i + 1 ] ) <= 0 );
+				}
+			}
+		
+	}
+
+	@SuppressWarnings("deprecation")
+	@Test
+	public void testQuickSortNaNs() {
+		final double[] t = { Double.NaN, 1, 5, 2, 1, 0, 9, 1, Double.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		for( int to = 1; to < t.length; to++ )
+			for( int from = 0; from < to; from++ ) {
+				final double[] a = t.clone();
+				DoubleBigArrays.quickSort( DoubleBigArrays.wrap( a ), from, to );
+				for( int i = to - 1; i-- != from; ) assertTrue( Double.compare( a[ i ], a[ i + 1 ] ) <= 0 );
+			}
+		
+	}
+
+
+}
\ No newline at end of file
diff --git a/test/it/unimi/dsi/fastutil/floats/FloatArraysTest.java b/test/it/unimi/dsi/fastutil/floats/FloatArraysTest.java
new file mode 100644
index 0000000..a332a04
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/floats/FloatArraysTest.java
@@ -0,0 +1,210 @@
+package it.unimi.dsi.fastutil.floats;
+
+import static org.junit.Assert.assertTrue;
+
+import java.util.Random;
+
+import org.junit.Test;
+
+public class FloatArraysTest {
+	
+	private static float[] identity( int n ) {
+		final float[] a = new float[ n ];
+		while( n-- != 0 ) a[ n ] = n;
+		return a;
+	}
+
+	@Test
+	public void testRadixSort1() {
+		float[] t = { 2, 1, 0, 4 };
+		FloatArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+		
+		t = new float[] { 2, -1, 0, -4 };
+		FloatArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+		
+		t = FloatArrays.shuffle( identity( 100 ), new Random( 0 ) );
+		FloatArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new float[ 100 ];
+		Random random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		FloatArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new float[ 100000 ];
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		FloatArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new float[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		FloatArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+	}
+
+	@Test
+	public void testRadixSort2() {
+		float[][] d = new float[ 2 ][];
+
+		d[ 0 ] = new float[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3;
+		d[ 1 ] = FloatArrays.shuffle( identity( 10 ), new Random( 0 ) );
+		FloatArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+		
+		d[ 0 ] = new float[ 100000 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100;
+		d[ 1 ] = FloatArrays.shuffle( identity( 100000 ), new Random( 6 ) );
+		FloatArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new float[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = i % 3 - 2;
+		Random random = new Random( 0 );
+		d[ 1 ] = new float[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt();
+		FloatArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+		
+		d[ 0 ] = new float[ 100000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt();
+		d[ 1 ] = new float[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt();
+		FloatArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new float[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt();
+		d[ 1 ] = new float[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt();
+		FloatArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+	}
+
+	@Test
+	public void testRadixSort() {
+		float[][] t = { { 2, 1, 0, 4 } };
+		FloatArrays.radixSort( t );
+		for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] );
+		
+		t[ 0 ] = FloatArrays.shuffle( identity( 100 ), new Random( 0 ) );
+		FloatArrays.radixSort( t );
+		for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] );
+
+		float[][] d = new float[ 2 ][];
+
+		d[ 0 ] = new float[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3;
+		d[ 1 ] = FloatArrays.shuffle( identity( 10 ), new Random( 0 ) );
+		FloatArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		
+		d[ 0 ] = new float[ 100000 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100;
+		d[ 1 ] = FloatArrays.shuffle( identity( 100000 ), new Random( 6 ) );
+		FloatArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new float[ 10 ];
+		Random random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt();
+		d[ 1 ] = new float[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt();
+		FloatArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		
+		d[ 0 ] = new float[ 100000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt();
+		d[ 1 ] = new float[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt();
+		FloatArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new float[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt();
+		d[ 1 ] = new float[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt();
+		FloatArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+	}
+
+
+
+
+	@Test
+	public void testMergeSortNaNs() {
+		final float[] t = { Float.NaN, 1, 5, 2, 1, 0, 9, 1, Float.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
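+		// Float.compare() defines a total order in which NaN is greater than every other value,
+		// so the checks below require each sorted subrange to end with its NaNs.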
+		for( int to = 1; to < t.length; to++ )
+			for( int from = 0; from < to; from++ ) {
+				final float[] a = t.clone();
+				FloatArrays.mergeSort( a, from, to );
+				for( int i = to - 1; i-- != from; ) assertTrue( Float.compare( a[ i ], a[ i + 1 ] ) <= 0 );
+			}
+		
+	}
+
+
+	@Test
+	public void testRadixSortNaNs() {
+		final float[] t = { Float.NaN, 1, 5, 2, 1, 0, 9, 1, Float.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		for( int to = 1; to < t.length; to++ )
+			for( int from = 0; from < to; from++ ) {
+				final float[] a = t.clone();
+				FloatArrays.radixSort( a, from, to );
+				for( int i = to - 1; i-- != from; ) assertTrue( Float.compare( a[ i ], a[ i + 1 ] ) <= 0 );
+			}
+		
+	}
+
+	@Test
+	public void testRadixSortIndirectNaNs() {
+		final float[] t = { Float.NaN, 1, 5, 2, 1, 0, 9, 1, Float.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		for( int to = 1; to < t.length; to++ )
+			for( int from = 0; from < to; from++ ) {
+				final int perm[] = new int[ t.length ];
+				for( int i = perm.length; i-- != 0; ) perm[ i ] = i;
+				FloatArrays.radixSortIndirect( perm, t, from, to, true );
+				for( int i = to - 1; i-- != from; ) assertTrue( Float.compare( t[ perm[ i ] ], t[ perm[ i + 1 ] ] ) <= 0 );
+			}
+		
+	}
+
+	@Test
+	public void testRadixSortIndirect2NaNs() {
+		final float[] t = { Float.NaN, 1, 5, 2, 1, 0, 9, 1, Float.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		for( int to = 1; to < t.length; to++ )
+			for( int from = 0; from < to; from++ ) {
+				final int perm[] = new int[ t.length ];
+				for( int i = perm.length; i-- != 0; ) perm[ i ] = i;
+				FloatArrays.radixSortIndirect( perm, t, t, from, to, true );
+				for( int i = to - 1; i-- != from; ) assertTrue( Float.compare( t[ perm[ i ] ], t[ perm[ i + 1 ] ] ) <= 0 );
+			}
+		
+	}
+
+
+	@SuppressWarnings("deprecation")
+	@Test
+	public void testQuickSortNaNs() {
+		final float[] t = { Float.NaN, 1, 5, 2, 1, 0, 9, 1, Float.NaN, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		for( int to = 1; to < t.length; to++ )
+			for( int from = 0; from < to; from++ ) {
+				final float[] a = t.clone();
+				FloatArrays.quickSort( a, from, to );
+				for( int i = to - 1; i-- != from; ) assertTrue( Float.compare( a[ i ], a[ i + 1 ] ) <= 0 );
+			}
+		
+	}
+
+}
diff --git a/test/it/unimi/dsi/fastutil/ints/Int2IntArrayMapTest.java b/test/it/unimi/dsi/fastutil/ints/Int2IntArrayMapTest.java
new file mode 100644
index 0000000..a010419
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/Int2IntArrayMapTest.java
@@ -0,0 +1,112 @@
+package it.unimi.dsi.fastutil.ints;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import it.unimi.dsi.fastutil.io.BinIO;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.ObjectOutputStream;
+import java.util.Map.Entry;
+
+import org.junit.Test;
+
+public class Int2IntArrayMapTest  {
+
+
+	@Test
+	public void testEquals() {
+		Int2IntArrayMap a1 = new Int2IntArrayMap();
+		a1.put(0,  1);
+		a1.put(1000, -1);
+		a1.put(2000, 3);
+
+		Int2IntArrayMap a2 = new Int2IntArrayMap();
+		a2.put(0,  1);
+		a2.put(1000, -1);
+		a2.put(2000, 3);
+
+		assertEquals(a1, a2);
+
+	}
+
+	@Test
+	public void testMap() {
+		for( int i = 0; i <= 1; i++ ) {
+			Int2IntArrayMap m = i == 0 ? new Int2IntArrayMap() : new Int2IntArrayMap( new int[ i ], new int[ i ] );
+			assertEquals( 0, m.put( 1, 1 ) );
+			assertEquals( 1 + i, m.size() );
+			assertTrue( m.containsKey( 1 ) );
+			assertTrue( m.containsValue( 1 ) );
+			assertEquals( 0, m.put(  2, 2  ) );
+			assertTrue( m.containsKey( 2 ) );
+			assertTrue( m.containsValue( 2 ) );
+			assertEquals( 2 + i, m.size() );
+			assertEquals( 1, m.put( 1, 3 ) );
+			assertTrue( m.containsValue( 3 ) );
+			assertEquals( 0, m.remove( 3 ) );
+			assertEquals( 0, m.put(  3, 3  ) );
+			assertTrue( m.containsKey( 3 ) );
+			assertTrue( m.containsValue( 3 ) );
+			assertEquals( 3 + i, m.size() );
+			assertEquals( 3, m.get( 1 ) );
+			assertEquals( 2, m.get( 2 ) );
+			assertEquals( 3, m.get( 3 ) );
+			assertEquals( new IntOpenHashSet( i == 0 ? new int[] { 1, 2, 3 } : new int[] { 0, 1, 2, 3 } ), new IntOpenHashSet( m.keySet().iterator() ) );
+			assertEquals( new IntOpenHashSet( i == 0 ? new int[] { 3, 2, 3 } : new int[] { 0, 3, 2, 3 } ), new IntOpenHashSet( m.values().iterator() ) );
+
+			for( Entry<Integer, Integer> e: m.entrySet() ) assertEquals( e.getValue(), m.get( e.getKey() ) );
+
+			assertTrue( i != 0 == m.entrySet().contains( new AbstractInt2IntMap.BasicEntry( 0, 0 ) ) );
+			assertTrue( m.entrySet().contains( new AbstractInt2IntMap.BasicEntry( 1, 3 ) ) );
+			assertTrue( m.entrySet().contains( new AbstractInt2IntMap.BasicEntry( 2, 2 ) ) );
+			assertTrue( m.entrySet().contains( new AbstractInt2IntMap.BasicEntry( 3, 3 ) ) );
+			assertFalse( m.entrySet().contains( new AbstractInt2IntMap.BasicEntry( 1, 2 ) ) );
+			assertFalse( m.entrySet().contains( new AbstractInt2IntMap.BasicEntry( 2, 1 ) ) );
+
+			assertEquals( 3, m.remove( 3 ) );
+			assertEquals( 2 + i, m.size() );
+			assertEquals( 3, m.remove( 1 ) );
+			assertEquals( 1 + i, m.size() );
+			assertFalse( m.containsKey( 1 ) );
+			assertEquals( 2, m.remove( 2 ) );
+			assertEquals( 0 + i, m.size() );
+			assertFalse( m.containsKey( 1 ) );
+		}
+	}
+	
+	@Test
+	public void testClone() {
+		Int2IntArrayMap m = new Int2IntArrayMap();
+		assertEquals( m, m.clone() );
+		m.put( 0, 1 );
+		assertEquals( m, m.clone() );
+		m.put( 0, 2 );
+		assertEquals( m, m.clone() );
+		m.put( 1, 2 );
+		assertEquals( m, m.clone() );
+		m.remove( 1 );
+		assertEquals( m, m.clone() );
+	}
+
+	@Test
+	public void testSerialisation() throws IOException, ClassNotFoundException {
+		Int2IntArrayMap m = new Int2IntArrayMap();
+		ByteArrayOutputStream baos = new ByteArrayOutputStream();
+		ObjectOutputStream oos = new ObjectOutputStream( baos );
+		oos.writeObject( m );
+		oos.close();
+		assertEquals( m, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) );
+		
+		m.put( 0, 1 );
+		m.put( 1, 2 );
+
+		baos.reset();
+		oos = new ObjectOutputStream( baos );
+		oos.writeObject( m );
+		oos.close();
+		assertEquals( m, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) );
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/ints/Int2IntLinkedOpenHashMapTest.java b/test/it/unimi/dsi/fastutil/ints/Int2IntLinkedOpenHashMapTest.java
new file mode 100644
index 0000000..2c1def6
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/Int2IntLinkedOpenHashMapTest.java
@@ -0,0 +1,495 @@
+package it.unimi.dsi.fastutil.ints;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import it.unimi.dsi.fastutil.Hash;
+import it.unimi.dsi.fastutil.objects.ObjectIterator;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.NoSuchElementException;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+public class Int2IntLinkedOpenHashMapTest {
+
+	private static java.util.Random r = new java.util.Random( 0 );
+
+	private static int genKey() {
+		return r.nextInt();
+	}
+
+	private static int genValue() {
+		return r.nextInt();
+	}
+
+	private static boolean valEquals( Object o1, Object o2 ) {
+		return o1 == null ? o2 == null : o1.equals( o2 );
+	}
+
+	@SuppressWarnings("unchecked")
+	protected static void test( int n, float f ) throws IOException, ClassNotFoundException {
+		Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE, f );
+		Map<Integer,Integer> t = new java.util.LinkedHashMap<Integer,Integer>();
+		/* First of all, we fill t with random data. */
+		for ( int i = 0; i < n; i++ )
+			t.put( ( Integer.valueOf( genKey() ) ), ( Integer.valueOf( genValue() ) ) );
+		/* Now we add to m the same data */
+		m.putAll( t );
+		assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) );
+		/* Now we check that m actually holds that data. */
+		for ( java.util.Iterator<?> i = t.entrySet().iterator(); i.hasNext(); ) {
+			java.util.Map.Entry<?, ?> e = (java.util.Map.Entry<?, ?>)i.next();
+			assertTrue( "Error: m and t differ on an entry (" + e + ") after insertion (iterating on t)", valEquals( e.getValue(), m.get( e.getKey() ) ) );
+		}
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for ( java.util.Iterator<?> i = m.entrySet().iterator(); i.hasNext(); ) {
+			java.util.Map.Entry<?, ?> e = (java.util.Map.Entry<?, ?>)i.next();
+			assertTrue( "Error: m and t differ on an entry (" + e + ") after insertion (iterating on m)", valEquals( e.getValue(), t.get( e.getKey() ) ) );
+		}
+		/* Now we check that m actually holds the same keys. */
+		for ( java.util.Iterator<Integer> i = t.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a key (" + o + ") after insertion (iterating on t)", m.containsKey( o ) );
+			assertTrue( "Error: m and t differ on a key (" + o + ", in keySet()) after insertion (iterating on t)", m.keySet().contains( o ) );
+		}
+		/* Now we check that m actually holds the same keys, but iterating on m. */
+		for ( java.util.Iterator<?> i = m.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a key after insertion (iterating on m)", t.containsKey( o ) );
+			assertTrue( "Error: m and t differ on a key (in keySet()) after insertion (iterating on m)", t.keySet().contains( o ) );
+		}
+		/* Now we check that m actually holds the same values. */
+		for ( java.util.Iterator<Integer> i = t.values().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a value after insertion (iterating on t)", m.containsValue( o ) );
+			assertTrue( "Error: m and t differ on a value (in values()) after insertion (iterating on t)", m.values().contains( o ) );
+		}
+		/* Now we check that m actually holds the same values, but iterating on m. */
+		for ( java.util.Iterator<?> i = m.values().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a value after insertion (iterating on m)", t.containsValue( o ) );
+			assertTrue( "Error: m and t differ on a value (in values()) after insertion (iterating on m)", t.values().contains( o ) );
+		}
+		/*
+		 * Now we check that inquiries about random data give the same answer in m and t. For m we
+		 * use the polymorphic method.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			int T = genKey();
+			assertTrue( "Error: divergence in keys between t and m (polymorphic method)", m.containsKey( ( Integer.valueOf( T ) ) ) == t.containsKey( ( Integer.valueOf( T ) ) ) );
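+			// The check below unboxes t's value (treating a missing key as the default 0) and verifies
+			// that m and t agree on whether T maps to a non-default value and, when t contains T, on the value itself.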
+			assertFalse( "Error: divergence between t and m (polymorphic method)", ( m.get( T ) != ( 0 ) ) != ( ( t.get( ( Integer.valueOf( T ) ) ) == null ? ( 0 ) : ( ( ( t.get( ( Integer.valueOf( T ) ) ) ).intValue() ) ) ) != ( 0 ) ) || t.get( ( Integer.valueOf( T ) ) ) != null && !m.get( ( Integer.valueOf( T ) ) ).equals( t.get( ( Integer.valueOf( T ) ) ) ) );
+		}
+		/*
+		 * Again, we check that inquiries about random data give the same answer in m and t, but for
+		 * m we use the standard method.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			int T = genKey();
+			assertTrue( "Error: divergence between t and m (standard method)", valEquals( m.get( ( Integer.valueOf( T ) ) ), t.get( ( Integer.valueOf( T ) ) ) ) );
+		}
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+		for ( int i = 0; i < 20 * n; i++ ) {
+			int T = genKey();
+			int U = genValue();
+			assertTrue( "Error: divergence in put() between t and m", valEquals( m.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ), t.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ) ) );
+			T = genKey();
+			assertTrue( "Error: divergence in remove() between t and m", valEquals( m.remove( ( Integer.valueOf( T ) ) ), t.remove( ( Integer.valueOf( T ) ) ) ) );
+		}
+		assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) );
+		/* Now we check that m actually holds the same data. */
+		for ( java.util.Iterator<?> i = t.entrySet().iterator(); i.hasNext(); ) {
+			java.util.Map.Entry<?, ?> e = (java.util.Map.Entry<?, ?>)i.next();
+			assertTrue( "Error: m and t differ on an entry (" + e + ") after removal (iterating on t)", valEquals( e.getValue(), m.get( e.getKey() ) ) );
+		}
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for ( java.util.Iterator<?> i = m.entrySet().iterator(); i.hasNext(); ) {
+			java.util.Map.Entry<?, ?> e = (java.util.Map.Entry<?, ?>)i.next();
+			assertTrue( "Error: m and t differ on an entry (" + e + ") after removal (iterating on m)", valEquals( e.getValue(), t.get( e.getKey() ) ) );
+		}
+		/* Now we check that m actually holds the same keys. */
+		for ( java.util.Iterator<Integer> i = t.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a key (" + o + ") after removal (iterating on t)", m.containsKey( o ) );
+			assertTrue( "Error: m and t differ on a key (" + o + ", in keySet()) after removal (iterating on t)", m.keySet().contains( o ) );
+		}
+		/* Now we check that m actually holds the same keys, but iterating on m. */
+		for ( java.util.Iterator<?> i = m.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a key after removal (iterating on m)", t.containsKey( o ) );
+			assertTrue( "Error: m and t differ on a key (in keySet()) after removal (iterating on m)", t.keySet().contains( o ) );
+		}
+		/* Now we check that m actually holds the same values. */
+		for ( java.util.Iterator<Integer> i = t.values().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a value after removal (iterating on t)", m.containsValue( o ) );
+			assertTrue( "Error: m and t differ on a value (in values()) after removal (iterating on t)", m.values().contains( o ) );
+		}
+		/* Now we check that m actually holds the same values, but iterating on m. */
+		for ( java.util.Iterator<?> i = m.values().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a value after removal (iterating on m)", t.containsValue( o ) );
+			assertTrue( "Error: m and t differ on a value (in values()) after removal (iterating on m)", t.values().contains( o ) );
+		}
+		int h = m.hashCode();
+		/* Now we save and read m. */
+		java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" );
+		java.io.OutputStream os = new java.io.FileOutputStream( ff );
+		java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os );
+		oos.writeObject( m );
+		oos.close();
+		java.io.InputStream is = new java.io.FileInputStream( ff );
+		java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is );
+		m = (Int2IntLinkedOpenHashMap)ois.readObject();
+		ois.close();
+		ff.delete();
+		assertEquals( "Error: hashCode() changed after save/read", h, m.hashCode() );
+
+		assertEquals( "Error: clone()", m, m.clone() );
+		/* Now we check that m actually holds that data. */
+		for ( java.util.Iterator<Integer> i = t.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on an entry after save/read", valEquals( m.get( o ), t.get( o ) ) );
+		}
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+		for ( int i = 0; i < 20 * n; i++ ) {
+			int T = genKey();
+			int U = genValue();
+			assertTrue( "Error: divergence in put() between t and m after save/read", valEquals( m.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ), t.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ) ) );
+			T = genKey();
+			assertTrue( "Error: divergence in remove() between t and m after save/read", valEquals( m.remove( ( Integer.valueOf( T ) ) ), t.remove( ( Integer.valueOf( T ) ) ) ) );
+		}
+		assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) );
+		/* Now we play with iterators. */
+		{
+			java.util.ListIterator<?> i, j;
+			Object J;
+			Map.Entry<Integer,Integer> E, F;
+			i = (java.util.ListIterator<?>)m.entrySet().iterator();
+			j = new java.util.LinkedList<Object>( t.entrySet() ).listIterator();
+			for ( int k = 0; k < 2 * n; k++ ) {
+				assertTrue( "Error: divergence in hasNext()" ,  i.hasNext() == j.hasNext() );
+				assertTrue( "Error: divergence in hasPrevious()" ,  i.hasPrevious() == j.hasPrevious() );
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					assertTrue( "Error: divergence in next()" ,  ( E = (java.util.Map.Entry<Integer, Integer>)i.next() ).getKey().equals( J = ( F = (Map.Entry<Integer, Integer>)j.next() ).getKey() ) );
+					if ( r.nextFloat() < 0.3 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+					else if ( r.nextFloat() < 0.3 ) {
+						Integer U = Integer.valueOf( genValue() );
+						E.setValue( U );
+						t.put( F.getKey(), U );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					assertTrue( "Error: divergence in previous()" ,  ( E = (java.util.Map.Entry<Integer, Integer>)i.previous() ).getKey().equals( J = ( F = (Map.Entry<Integer, Integer>)j.previous() ).getKey() ) );
+					if ( r.nextFloat() < 0.3 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+					else if ( r.nextFloat() < 0.3 ) {
+						Integer U = Integer.valueOf( genValue() );
+						E.setValue( U );
+						t.put( F.getKey(), U );
+					}
+				}
+				assertTrue( "Error: divergence in nextIndex()" ,  i.nextIndex() == j.nextIndex() );
+				assertTrue( "Error: divergence in previousIndex()" ,  i.previousIndex() == j.previousIndex() );
+			}
+		}
+		if ( t.size() > 0 ) {
+			java.util.ListIterator<Integer> i, j;
+			Object J;
+			j = new java.util.LinkedList<Integer>( t.keySet() ).listIterator();
+			int e = r.nextInt( t.size() );
+			Object from;
+			do
+				from = j.next();
+			while ( e-- != 0 );
+			i = (java.util.ListIterator<Integer>)m.keySet().iterator( ( ( ( (Integer)( from ) ).intValue() ) ) );
+			for ( int k = 0; k < 2 * n; k++ ) {
+				assertTrue( "Error: divergence in hasNext() (iterator with starting point " + from + ")" ,  i.hasNext() == j.hasNext() );
+				assertTrue( "Error: divergence in hasPrevious() (iterator with starting point " + from + ")" ,  i.hasPrevious() == j.hasPrevious() );
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					assertTrue( "Error: divergence in next() (iterator with starting point " + from + ")" ,  i.next().equals( J = j.next() ) );
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					assertTrue( "Error: divergence in previous() (iterator with starting point " + from + ")" ,  i.previous().equals( J = j.previous() ) );
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+				assertTrue( "Error: divergence in nextIndex() (iterator with starting point " + from + ")" ,  i.nextIndex() == j.nextIndex() );
+				assertTrue( "Error: divergence in previousIndex() (iterator with starting point " + from + ")" ,  i.previousIndex() == j.previousIndex() );
+			}
+		}
+		/* Now we check that m actually holds that data. */
+		assertTrue( "Error: ! m.equals( t ) after iteration" ,  m.equals( t ) );
+		assertTrue( "Error: ! t.equals( m ) after iteration" ,  t.equals( m ) );
+		/* Now we take everything out of m and check that it is empty. */
+		for ( java.util.Iterator<Integer> i = t.keySet().iterator(); i.hasNext(); )
+			m.remove( i.next() );
+		assertTrue( "Error: m is not empty (as it should be)", m.isEmpty() );
+		/*
+		 * Now we check that the iteration order of m is properly affected, using random movements
+		 */
+		{
+			m.clear();
+			final java.util.Deque<Integer> d = new java.util.ArrayDeque<Integer>();
+			for ( int k = 0; k < 2 * n; k++ ) {
+				int T = genKey();
+				int U = genValue();
+				boolean dr = d.remove( ( Integer.valueOf( T ) ) );
+				int rU = m.put( T, U );
+				assertTrue( "Error: deque reported previous key differently than map." ,  dr == ( m.defaultReturnValue() != rU ) );
+				if ( 1 == ( r.nextInt( 2 ) % 2 ) ) {
+					d.addFirst( ( Integer.valueOf( T ) ) );
+					m.getAndMoveToFirst( T );
+				}
+				else {
+					d.addLast( ( Integer.valueOf( T ) ) );
+					m.getAndMoveToLast( T );
+				}
+			}
+			// Iteration order should be identical
+			assertTrue( "Error: Iteration order of map different than iteration order of deque." ,  new java.util.ArrayList<Object>( m.keySet() ).equals( new java.util.ArrayList<Integer>( d ) ) );
+		}
+		m.clear();
+		t.clear();
+		m.trim();
+		assertTrue( "Error: !m.equals(t) after trim()", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after trim()", t.equals( m ) );
+		m.trim();
+		assertTrue( "Error: !m.equals(t) after trim()", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after trim()", t.equals( m ) );
+
+		return;
+	}
+
+	@Test
+	public void test1() throws IOException, ClassNotFoundException {
+		test( 1, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1, Hash.FAST_LOAD_FACTOR );
+		test( 1, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test10() throws IOException, ClassNotFoundException {
+		test( 10, Hash.DEFAULT_LOAD_FACTOR );
+		test( 10, Hash.FAST_LOAD_FACTOR );
+		test( 10, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test100() throws IOException, ClassNotFoundException {
+		test( 100, Hash.DEFAULT_LOAD_FACTOR );
+		test( 100, Hash.FAST_LOAD_FACTOR );
+		test( 100, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Ignore("Too long")
+	@Test
+	public void test1000() throws IOException, ClassNotFoundException {
+		test( 1000, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1000, Hash.FAST_LOAD_FACTOR );
+		test( 1000, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void testAdd() {
+		Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE );
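+		// add( k, incr ) increments the value associated with k by incr (starting from the default
+		// return value when k is absent) and returns the previous value, as exercised below.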
+		assertEquals( 0, m.add( 0, 2 ) );
+		assertEquals( 2, m.get( 0 ) );
+		assertEquals( 2, m.add( 0, 3 ) );
+		assertEquals( 5, m.get( 0 ) );
+		ObjectIterator<Int2IntMap.Entry> fastIterator = m.int2IntEntrySet().fastIterator();
+		Int2IntMap.Entry next = fastIterator.next();
+		assertEquals( 0, next.getIntKey() );
+		assertEquals( 5, next.getIntValue() );
+		assertFalse( fastIterator.hasNext() );
+		
+		m.defaultReturnValue( -1 );
+		assertEquals( -1, m.add( 1, 1 ) );
+		assertEquals( 0, m.get( 1 ) );
+		assertEquals( 0, m.add( 1, 1 ) );
+		assertEquals( 1, m.get( 1 ) );
+		assertEquals( 1, m.add( 1, -2 ) );
+		assertEquals( -1, m.get( 1 ) );
+		fastIterator = m.int2IntEntrySet().fastIterator();
+		next = fastIterator.next();
+		assertEquals( 0, next.getIntKey() );
+		assertEquals( 5, next.getIntValue() );
+		next = fastIterator.next();
+		assertEquals( 1, next.getIntKey() );
+		assertEquals( -1, next.getIntValue() );
+		assertFalse( fastIterator.hasNext() );
+		
+		for( int i = 0; i < 100; i++ ) m.add( i, 1 );
+		assertEquals( 0, m.firstIntKey() );
+		assertEquals( 99, m.lastIntKey() );
+	}
+
+	@Test
+	public void testPut() {
+		Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE );
+		assertEquals( 0, m.put( 0, 2 ) );
+		assertEquals( 2, m.put( 0, 3 ) );
+		assertEquals( 3, m.get( 0 ) );
+		assertEquals( null, m.put( Integer.valueOf( 1 ), Integer.valueOf( 2 ) ) );
+		assertEquals( Integer.valueOf( 2 ), m.put( Integer.valueOf( 1 ), Integer.valueOf( 3 ) ) );
+		assertEquals( Integer.valueOf( 3 ), m.get( Integer.valueOf( 0 ) ) );
+	}
+
+	@Test
+	public void testRemove() {
+		Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE );
+		m.defaultReturnValue( -1 );
+		for( int i = 0; i < 100; i++ ) assertEquals( -1, m.put( i, i ) );
+		for( int i = 0; i < 100; i++ ) assertEquals( -1, m.remove( i + 100 ) );
+		for( int i = 50; i < 150; i++ ) assertEquals( i % 100, m.remove( i % 100 ) );
+	}
+
+	@Test
+	public void testContainsValue() {
+		Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE );
+		assertEquals( 0, m.put( 0, 2 ) );
+		assertEquals( 0, m.put( 1, 3 ) );
+		assertTrue( m.containsValue( 2 ) );
+		assertTrue( m.containsValue( 3 ) );
+		assertFalse( m.containsValue( 4 ) );
+		assertTrue( m.containsKey( 0 ) );
+		assertTrue( m.containsKey( 1 ) );
+		assertFalse( m.containsKey( 2 ) );
+	}
+	
+	@Test
+	public void testIterator() {
+		Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE );
+		m.defaultReturnValue( -1 );
+		for( int i = 0; i < 100; i++ ) assertEquals( -1, m.put( i, i ) );
+		assertEquals( 0, m.firstIntKey() );
+		
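+		// The linked map's key-set iterator is a list iterator over insertion order: iterator( k )
+		// starts just after key k, and nextIndex()/previousIndex() report positions in that order.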
+		IntListIterator iterator = (IntListIterator)m.keySet().iterator();
+		for( int i = 0; i <= 100; i++ ) {
+			assertEquals( Integer.toString( i ), i - 1, iterator.previousIndex() );
+			assertEquals( Integer.toString( i ), i, iterator.nextIndex() );
+			if ( i != 100 ) assertEquals( Integer.toString( i ), i, iterator.nextInt() );
+		}
+
+		iterator = (IntListIterator)m.keySet().iterator( m.lastIntKey() );
+		for( int i = 100; i-- != 0; ) {
+			assertEquals( Integer.toString( i ), i, iterator.previousIndex() );
+			assertEquals( Integer.toString( i ), i + 1, iterator.nextIndex() );
+			if ( i != 0 ) assertEquals( Integer.toString( i ), i, iterator.previousInt() );
+		}
+
+		iterator = (IntListIterator)m.keySet().iterator( 50 );
+		for( int i = 50; i < 100; i++ ) {
+			assertEquals( Integer.toString( i ), i, iterator.previousIndex() );
+			assertEquals( Integer.toString( i ), i + 1, iterator.nextIndex() );
+			if ( i != 99 ) assertEquals( Integer.toString( i ), i + 1, iterator.nextInt() );
+		}
+
+		iterator = (IntListIterator)m.keySet().iterator( 50 );
+		for( int i = 50; i-- != -1; ) {
+			assertEquals( Integer.toString( i ), i + 1, iterator.previousIndex() );
+			assertEquals( Integer.toString( i ), i + 2, iterator.nextIndex() );
+			if ( i != -1 ) assertEquals( Integer.toString( i ), i + 1, iterator.previousInt() );
+		}
+
+		iterator = (IntListIterator)m.keySet().iterator( 50 );
+		for( int i = 50; i-- != -1; ) assertEquals( Integer.toString( i ), i + 1, iterator.previousInt() );
+		assertEquals( -1, iterator.previousIndex() );
+		assertEquals( 0, iterator.nextIndex() );
+		
+		iterator = (IntListIterator)m.keySet().iterator( 50 );
+		for( int i = 50; i < 100 - 1; i++ ) assertEquals( Integer.toString( i ), i + 1, iterator.nextInt() );
+		assertEquals( 99, iterator.previousIndex() );
+		assertEquals( 100, iterator.nextIndex() );
+
+		iterator = (IntListIterator)m.keySet().iterator( 50 );
+		iterator.previousInt();
+		iterator.remove();
+		assertEquals( 49, iterator.previousIndex() );
+		assertEquals( 49, iterator.previousInt() );
+		
+		iterator = (IntListIterator)m.keySet().iterator( 49 );
+		iterator.nextInt();
+		iterator.remove();
+		assertEquals( 50, iterator.nextIndex() );
+		assertEquals( 52, iterator.nextInt() );
+	}
+	
+	@Test(expected=NoSuchElementException.class)
+	public void testIteratorMissingElement() {
+		Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE );
+		m.defaultReturnValue( -1 );
+		for( int i = 0; i < 100; i++ ) assertEquals( -1, m.put( i, i ) );
+		m.keySet().iterator( 1000 );
+	}
+
+
+	@Test
+	public void testPutAndMove() {
+		Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE );
+		m.defaultReturnValue( Integer.MIN_VALUE );
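+		// putAndMoveToFirst()/putAndMoveToLast() insert or update the key, move it to the corresponding
+		// end of the iteration order, and return the previous value (or the default if the key was absent).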
+		for( int i = 0; i < 100; i++ ) assertEquals( Integer.MIN_VALUE, m.putAndMoveToFirst( i, i ) );
+		m.clear();
+		for( int i = 0; i < 100; i++ ) assertEquals( Integer.MIN_VALUE, m.putAndMoveToLast( i, i ) );
+		assertEquals( Integer.MIN_VALUE, m.putAndMoveToFirst( -1, -1 ) );
+		assertEquals( -1, m.firstIntKey() );
+		assertEquals( Integer.MIN_VALUE, m.putAndMoveToFirst( -2, -2 ) );
+		assertEquals( -2, m.firstIntKey() );
+		assertEquals( -1, m.putAndMoveToFirst( -1, -1 ) );
+		assertEquals( -1, m.firstIntKey() );
+		assertEquals( -1, m.putAndMoveToFirst( -1, -1 ) );
+		assertEquals( -1, m.firstIntKey() );
+		assertEquals( -1, m.putAndMoveToLast( -1, -1 ) );
+		assertEquals( -1, m.lastIntKey() );
+		assertEquals( Integer.MIN_VALUE, m.putAndMoveToLast( 100, 100 ) );
+		assertEquals( 100, m.lastIntKey() );
+		assertEquals( Integer.MIN_VALUE, m.putAndMoveToLast( 101, 101 ) );
+		assertEquals( 101, m.lastIntKey() );
+		assertEquals( 100, m.putAndMoveToLast( 100, 100 ) );
+		assertEquals( 100, m.lastIntKey() );
+		assertEquals( 100, m.putAndMoveToLast( 100, 100 ) );
+		assertEquals( 100, m.lastIntKey() );
+		assertEquals( 100, m.putAndMoveToFirst( 100, 100 ) );
+		assertEquals( 100, m.firstIntKey() );
+	}
+
+	@Test
+	public void testRemoveFirstLast() {
+		Int2IntLinkedOpenHashMap m = new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE );
+		m.defaultReturnValue( -1 );
+		for( int i = 0; i < 100; i++ ) assertEquals( -1, m.put( i, 1 + i ) );
+		assertEquals( 1, m.removeFirstInt() );
+		assertEquals( 2, m.removeFirstInt() );
+		assertEquals( 100, m.removeLastInt() );
+	}	
+
+	@Test(expected=NoSuchElementException.class)
+	public void testRemoveFirstEmpty() {
+		new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ).removeFirstInt();
+	}
+
+	@Test(expected=NoSuchElementException.class)
+	public void testRemoveLastEmpty() {
+		new Int2IntLinkedOpenHashMap( Hash.DEFAULT_INITIAL_SIZE ).removeLastInt();
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/ints/Int2IntOpenCustomHashMapTest.java b/test/it/unimi/dsi/fastutil/ints/Int2IntOpenCustomHashMapTest.java
new file mode 100644
index 0000000..608044a
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/Int2IntOpenCustomHashMapTest.java
@@ -0,0 +1,287 @@
+package it.unimi.dsi.fastutil.ints;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import it.unimi.dsi.fastutil.Hash;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.HashMap;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+@SuppressWarnings("rawtypes")
+
+/** Not a particularly good test, but it will check that we use the same hashing strategy everywhere. */
+
+public class Int2IntOpenCustomHashMapTest {
+
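+	// The strategy below reverses the bits of each key, so it disagrees with the key's natural hash;
+	// any code path that fails to apply it consistently will look in the wrong slots and fail the test.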
+	private static final class Strategy implements IntHash.Strategy, Serializable {
+		private static final long serialVersionUID = 1L;
+
+		@Override
+		public int hashCode( int e ) {
+			return Integer.reverse( e );
+		}
+
+		@Override
+		public boolean equals( int a, int b ) {
+			return a == b;
+		}
+	}
+
+	private final static Strategy strategy = new Strategy();
+	
+	private static java.util.Random r = new java.util.Random( 0 );
+
+	private static int genKey() {
+		return r.nextInt( 10 );
+	}
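+	// Keys are drawn from a tiny range so that puts and removes repeatedly hit the same few entries.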
+	
+	@SuppressWarnings("boxing")
+	private static void checkTable( Int2IntOpenCustomHashMap s ) {
+		final boolean[] used = s.used;
+		final int[]key = s.key;
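+		// n & -n isolates the lowest set bit, so the first assertion holds exactly when n is a power of two.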
+		assert ( s.n & -s.n ) == s.n : "Table length is not a power of two: " + s.n;
+		assert s.n == s.key.length;
+		assert s.n == used.length;
+		int n = s.n;
+		while ( n-- != 0 )
+			if ( used[ n ] && !s.containsKey( key[ n ] ) ) throw new AssertionError( "Hash table has key " + key[ n ]
+					+ " marked as occupied, but the key does not belong to the table" );
+
+		java.util.HashMap<Integer,Integer> t = new java.util.HashMap<Integer, Integer>();
+		for ( int i = s.size(); i-- != 0; )
+			if ( used[ i ] && t.put( key[ i ], key[ i ] ) != null ) throw new AssertionError( "Key " + key[ i ] + " appears twice" );
+
+	}
+
+	private static void printProbes( Int2IntOpenCustomHashMap m ) {
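+		// Walks the table counting runs of consecutive occupied slots; a run of length c contributes
+		// ( c + 1 ) * ( c + 2 ) / 2 probes, and the measured average is printed next to the expected value for load factor f.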
+		long totProbes = 0;
+		double totSquareProbes = 0;
+		int maxProbes = 0;
+		final double f = (double)m.size / m.n;
+		for ( int i = 0, c = 0; i < m.n; i++ ) {
+			if ( m.used[ i ] ) c++;
+			else {
+				if ( c != 0 ) {
+					final long p = ( c + 1 ) * ( c + 2 ) / 2;
+					totProbes += p;
+					totSquareProbes += (double)p * p;
+				}
+				maxProbes = Math.max( c, maxProbes );
+				c = 0;
+				totProbes++;
+				totSquareProbes++;
+			}
+		}
+
+		final double expected = (double)totProbes / m.n;
+		System.err.println( "Expected probes: " + (
+				3 * Math.sqrt( 3 ) * ( f / ( ( 1 - f ) * ( 1 - f ) ) ) + 4 / ( 9 * f ) - 1
+				) + "; actual: " + expected + "; stddev: " + Math.sqrt( totSquareProbes / m.n - expected * expected ) + "; max probes: " + maxProbes );
+	}
+
+	@SuppressWarnings({ "boxing" })
+	private static void test( int n, float f ) throws IOException, ClassNotFoundException {
+		int c;
+		final Integer key[] = new Integer[ (int)Math.ceil( n * f ) ];
+		HashMap<Integer,Integer> t = new HashMap<Integer,Integer>();
+		/* First of all, we fill t with random data. */
+
+		for ( int i = 0; i < key.length; i++ ) t.put( key[ i ] = new Integer( genKey() ), key[ i ] );
+
+		Int2IntOpenCustomHashMap m = new Int2IntOpenCustomHashMap( Hash.DEFAULT_INITIAL_SIZE, f, strategy );
+
+		
+		/* Now we add to m the same data */
+
+		m.putAll( t );
+		checkTable( m );
+		
+		assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) );
+		printProbes( m );
+
+		/* Now we check that m actually holds that data. */
+
+		for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on t)", m.get( e ).equals( e ) );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+
+		c = 0;
+		for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			c++;
+			assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on m)", t.get( e ).equals( e ) );
+		}
+
+		assertEquals( "Error: m has only " + c + " keys instead of " + t.size() + " after insertion (iterating on m)", c, t.size() );
+		/*
+		 * Now we check that inquiries about random data give the same answer in m and t. For m we
+		 * use the polymorphic method.
+		 */
+
+		for ( int i = 0; i < n; i++ ) {
+			int T = genKey();
+			if ( m.containsKey( T ) ) assertEquals( "Error: divergence in keys between t and m (polymorphic method)", Integer.valueOf( m.get( T ) ), t.get( Integer.valueOf( T ) ) );
+			else assertFalse( "Error: divergence in keys between t and m (polymorphic method)", t.containsKey( Integer.valueOf( T ) ) );
+		}
+
+		/*
+		 * Again, we check that inquiries about random data give the same answer in m and t, but for
+		 * m we use the standard method.
+		 */
+
+		for ( int i = 0; i < n; i++ ) {
+			int T = genKey();
+			assertEquals( "Error: divergence between t and m (standard method)", m.get( Integer.valueOf( T ) ), t.get( Integer.valueOf( T ) ) );
+		}
+
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for ( int i = 0; i < 20 * n; i++ ) {
+			int T = genKey();
+			assertEquals( "Error: divergence in add() between t and m", m.put( Integer.valueOf( T ), Integer.valueOf( T ) ), t.put( Integer.valueOf( T ), Integer.valueOf( T ) ) );
+			T = genKey();
+			assertEquals( "Error: divergence in remove() between t and m", m.remove( Integer.valueOf( T ) ), t.remove( Integer.valueOf( T ) ) );
+		}
+
+		checkTable( m );
+		assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) );
+		/* Now we check that m actually holds that data. */
+
+		for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertFalse( "Error: m and t differ on a key (" + e + ") after removal (iterating on t)", !m.get( e ).equals( e ) );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+
+		for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertFalse( "Error: m and t differ on a key (" + e + ") after removal (iterating on m)", !t.get( e ).equals( e ) );
+		}
+
+		/* Now we check cloning. */
+
+		assertTrue( "Error: m does not equal m.clone()", m.equals( m.clone() ) );
+		assertTrue( "Error: m.clone() does not equal m", m.clone().equals( m ) );
+
+		int h = m.hashCode();
+
+		/* Now we save and read m. */
+
+		java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" );
+		java.io.OutputStream os = new java.io.FileOutputStream( ff );
+		java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os );
+
+		oos.writeObject( m );
+		oos.close();
+
+		java.io.InputStream is = new java.io.FileInputStream( ff );
+		java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is );
+
+		m = (Int2IntOpenCustomHashMap)ois.readObject();
+		ois.close();
+		ff.delete();
+
+		assertEquals( "Error: hashCode() changed after save/read", h, m.hashCode() );
+
+		printProbes( m );
+		checkTable( m );
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+
+		for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertFalse( "Error: m and t differ on a key (" + e + ") after save/read", !t.get( e ).equals( e ) );
+		}
+
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for ( int i = 0; i < 20 * n; i++ ) {
+			int T = genKey();
+			assertEquals( "Error: divergence in add() between t and m after save/read", m.put( Integer.valueOf( T ), Integer.valueOf( T ) ), t.put( Integer.valueOf( T ), Integer.valueOf( T ) ) );
+			T = genKey();
+			assertEquals( "Error: divergence in remove() between t and m after save/read", m.remove( Integer.valueOf( T ) ), t.remove( Integer.valueOf( T ) ) );
+		}
+
+		assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) );
+
+		/* Now we take everything out of m and check that it is empty. */
+
+		for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) {
+			i.next();
+			i.remove();
+		}
+
+		assertFalse( "Error: m is not empty (as it should be)", !m.isEmpty() );
+
+
+		m = new Int2IntOpenCustomHashMap( n, f, strategy );
+		t.clear();
+
+		/* Now we torture-test the hash table. This part is implemented only for integers and longs. */
+
+		for( int i = n; i-- != 0; ) m.put( i, i );
+		t.putAll( m );
+		printProbes( m );
+		checkTable( m );
+
+		for( int i = n; i-- != 0; )
+			assertEquals( "Error: m and t differ on a key during torture-test insertion.", Integer.valueOf( m.put( i, i ) ), t.put( ( Integer.valueOf( i ) ), ( Integer.valueOf( i ) ) ) );
+
+		assertTrue( "Error: !m.equals(t) after torture-test insertion", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after torture-test insertion", t.equals( m ) );
+
+		for( int i = n; i-- != 0; )
+			assertEquals( "Error: m and t differ on a key during torture-test removal.", Integer.valueOf( m.remove( i ) ), t.remove( ( Integer.valueOf( i ) ) ) );
+
+		assertTrue( "Error: !m.equals(t) after torture-test removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after torture-test removal", t.equals( m ) );
+		assertTrue( "Error: !m.equals(m.clone()) after torture-test removal", m.equals( m.clone() ) );
+		assertTrue( "Error: !m.clone().equals(m) after torture-test removal", m.clone().equals( m ) );
+
+		return;
+	}
+
+
+	@Test
+	public void test1() throws IOException, ClassNotFoundException {
+		test( 1, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1, Hash.FAST_LOAD_FACTOR );
+		test( 1, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test10() throws IOException, ClassNotFoundException {
+		test( 10, Hash.DEFAULT_LOAD_FACTOR );
+		test( 10, Hash.FAST_LOAD_FACTOR );
+		test( 10, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test100() throws IOException, ClassNotFoundException {
+		test( 100, Hash.DEFAULT_LOAD_FACTOR );
+		test( 100, Hash.FAST_LOAD_FACTOR );
+		test( 100, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Ignore("Too long")
+	@Test
+	public void test1000() throws IOException, ClassNotFoundException {
+		test( 1000, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1000, Hash.FAST_LOAD_FACTOR );
+		test( 1000, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/ints/Int2IntOpenHashMapTest.java b/test/it/unimi/dsi/fastutil/ints/Int2IntOpenHashMapTest.java
new file mode 100644
index 0000000..4065674
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/Int2IntOpenHashMapTest.java
@@ -0,0 +1,284 @@
+package it.unimi.dsi.fastutil.ints;
+
+import it.unimi.dsi.fastutil.Hash;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.junit.Ignore;
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+@SuppressWarnings("rawtypes")
+public class Int2IntOpenHashMapTest {
+
+	@Test
+	public void testStrangeRetainAllCase() {
+
+		IntArrayList initialElements = IntArrayList.wrap(new int[] { 586, 940,
+				1086, 1110, 1168, 1184, 1185, 1191, 1196, 1229, 1237, 1241,
+				1277, 1282, 1284, 1299, 1308, 1309, 1310, 1314, 1328, 1360,
+				1366, 1370, 1378, 1388, 1392, 1402, 1406, 1411, 1426, 1437,
+				1455, 1476, 1489, 1513, 1533, 1538, 1540, 1541, 1543, 1547,
+				1548, 1551, 1557, 1568, 1575, 1577, 1582, 1583, 1584, 1588,
+				1591, 1592, 1601, 1610, 1618, 1620, 1633, 1635, 1653, 1654,
+				1655, 1660, 1661, 1665, 1674, 1686, 1688, 1693, 1700, 1705,
+				1717, 1720, 1732, 1739, 1740, 1745, 1746, 1752, 1754, 1756,
+				1765, 1766, 1767, 1771, 1772, 1781, 1789, 1790, 1793, 1801,
+				1806, 1823, 1825, 1827, 1828, 1829, 1831, 1832, 1837, 1839,
+				1844, 2962, 2969, 2974, 2990, 3019, 3023, 3029, 3030, 3052,
+				3072, 3074, 3075, 3093, 3109, 3110, 3115, 3116, 3125, 3137,
+				3142, 3156, 3160, 3176, 3180, 3188, 3193, 3198, 3207, 3209,
+				3210, 3213, 3214, 3221, 3225, 3230, 3231, 3236, 3240, 3247,
+				3261, 4824, 4825, 4834, 4845, 4852, 4858, 4859, 4867, 4871,
+				4883, 4886, 4887, 4905, 4907, 4911, 4920, 4923, 4924, 4925,
+				4934, 4942, 4953, 4957, 4965, 4973, 4976, 4980, 4982, 4990,
+				4993, 6938, 6949, 6953, 7010, 7012, 7034, 7037, 7049, 7076,
+				7094, 7379, 7384, 7388, 7394, 7414, 7419, 7458, 7459, 7466,
+				7467 });
+
+		IntArrayList retainElements = IntArrayList.wrap(new int[] { 586 });
+
+		// Initialize both implementations with the same data
+		Int2IntOpenHashMap instance = new Int2IntOpenHashMap(initialElements.elements(), new int[ initialElements.size() ]);
+		IntRBTreeSet referenceInstance = new IntRBTreeSet(initialElements);
+
+		instance.keySet().retainAll(retainElements);
+		referenceInstance.retainAll(retainElements);
+
+		// print the correct result {586}
+		System.out.println("ref: " + referenceInstance);
+
+		// prints {586, 7379}, which is clearly wrong
+		System.out.println("ohm: " + instance);
+
+		// Fails
+		assertEquals( referenceInstance, instance.keySet() );
+	}	
+
+	private static java.util.Random r = new java.util.Random( 0 );
+
+	private static int genKey() {
+		return r.nextInt();
+	}
+
+	private static int genValue() {
+		return r.nextInt();
+	}
+
+	private static boolean valEquals( Object o1, Object o2 ) {
+		return o1 == null ? o2 == null : o1.equals( o2 );
+	}
+
+	@SuppressWarnings({ "unchecked", "boxing" })
+	protected static void test( int n, float f ) throws IOException, ClassNotFoundException {
+		Int2IntOpenHashMap m = new Int2IntOpenHashMap( Hash.DEFAULT_INITIAL_SIZE, f );
+		Map t = new java.util.HashMap();
+		/* First of all, we fill t with random data. */
+		for ( int i = 0; i < n; i++ )
+			t.put( ( Integer.valueOf( genKey() ) ), ( Integer.valueOf( genValue() ) ) );
+		/* Now we add to m the same data */
+		m.putAll( t );
+		assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) );
+		/*
+		 * Now we check that m actually holds that data.
+		 */
+		for ( java.util.Iterator i = t.entrySet().iterator(); i.hasNext(); ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			assertTrue( "Error: m and t differ on an entry (" + e + ") after insertion (iterating on t)", valEquals( e.getValue(), m.get( e.getKey() ) ) );
+		}
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for ( java.util.Iterator i = m.entrySet().iterator(); i.hasNext(); ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			assertTrue( "Error: m and t differ on an entry (" + e + ") after insertion (iterating on m)", valEquals( e.getValue(), t.get( e.getKey() ) ) );
+		}
+		/* Now we check that m actually holds the same keys. */
+		for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a key (" + o + ") after insertion (iterating on t)", m.containsKey( o ) );
+			assertTrue( "Error: m and t differ on a key (" + o + ", in keySet()) after insertion (iterating on t)", m.keySet().contains( o ) );
+		}
+		/* Now we check that m actually holds the same keys, but iterating on m. */
+		for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a key after insertion (iterating on m)", t.containsKey( o ) );
+			assertTrue( "Error: m and t differ on a key (in keySet()) after insertion (iterating on m)", t.keySet().contains( o ) );
+		}
+		/* Now we check that m actually holds the same values. */
+		for ( java.util.Iterator i = t.values().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a value after insertion (iterating on t)", m.containsValue( o ) );
+			assertTrue( "Error: m and t differ on a value (in values()) after insertion (iterating on t)", m.values().contains( o ) );
+		}
+		/* Now we check that m actually holds the same values, but iterating on m. */
+		for ( java.util.Iterator i = m.values().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a value after insertion (iterating on m)", t.containsValue( o ) );
+			assertTrue( "Error: m and t differ on a value (in values()) after insertion (iterating on m)", t.values().contains( o ) );
+		}
+		/*
+		 * Now we check that inquiries about random data give the same answer in m and t. For m we
+		 * use the polymorphic method.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			int T = genKey();
+			assertTrue( "Error: divergence in keys between t and m (polymorphic method)", m.containsKey( ( Integer.valueOf( T ) ) ) == t.containsKey( ( Integer.valueOf( T ) ) ) );
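+			// Same consistency check as in the linked-map test: m and t must agree on whether T maps to a
+			// non-default value and, when t contains T, on the value itself.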
+			assertTrue( "Error: divergence between t and m (polymorphic method)",
+					!( m.get( T ) != ( 0 ) ) != ( ( t.get( ( Integer.valueOf( T ) ) ) == null ? ( 0 ) : ( ( ( (Integer)( t.get( ( Integer.valueOf( T ) ) ) ) ).intValue() ) ) ) != ( 0 ) ) ||
+							t.get( ( Integer.valueOf( T ) ) ) != null &&
+							!m.get( ( Integer.valueOf( T ) ) ).equals( t.get( ( Integer.valueOf( T ) ) ) ) );
+		}
+		/*
+		 * Again, we check that inquiries about random data give the same answer in m and t, but for
+		 * m we use the standard method.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			int T = genKey();
+			assertTrue( "Error: divergence between t and m (standard method)", valEquals( m.get( ( Integer.valueOf( T ) ) ), t.get( ( Integer.valueOf( T ) ) ) ) );
+		}
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+		for ( int i = 0; i < 20 * n; i++ ) {
+			int T = genKey();
+			int U = genValue();
+			assertTrue( "Error: divergence in put() between t and m",
+					valEquals( m.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ), t.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ) ) );
+			T = genKey();
+			assertTrue( "Error: divergence in remove() between t and m", valEquals( m.remove( ( Integer.valueOf( T ) ) ), t.remove( ( Integer.valueOf( T ) ) ) ) );
+		}
+		assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) );
+		/*
+		 * Now we check that m actually holds the same data.
+		 */
+		for ( java.util.Iterator i = t.entrySet().iterator(); i.hasNext(); ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			assertTrue( "Error: m and t differ on an entry (" + e + ") after removal (iterating on t)", valEquals( e.getValue(), m.get( e.getKey() ) ) );
+		}
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for ( java.util.Iterator i = m.entrySet().iterator(); i.hasNext(); ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			assertTrue( "Error: m and t differ on an entry (" + e + ") after removal (iterating on m)", valEquals( e.getValue(), t.get( e.getKey() ) ) );
+		}
+		/* Now we check that m actually holds the same keys. */
+		for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a key (" + o + ") after removal (iterating on t)", m.containsKey( o ) );
+			assertTrue( "Error: m and t differ on a key (" + o + ", in keySet()) after removal (iterating on t)", m.keySet().contains( o ) );
+		}
+		/* Now we check that m actually holds the same keys, but iterating on m. */
+		for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a key after removal (iterating on m)", t.containsKey( o ) );
+			assertTrue( "Error: m and t differ on a key (in keySet()) after removal (iterating on m)", t.keySet().contains( o ) );
+		}
+		/* Now we check that m actually holds the same values. */
+		for ( java.util.Iterator i = t.values().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a value after removal (iterating on t)", m.containsValue( o ) );
+			assertTrue( "Error: m and t differ on a value (in values()) after removal (iterating on t)", m.values().contains( o ) );
+		}
+		/* Now we check that m actually holds the same values, but iterating on m. */
+		for ( java.util.Iterator i = m.values().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a value after removal (iterating on m)", t.containsValue( o ) );
+			assertTrue( "Error: m and t differ on a value (in values()) after removal (iterating on m)", t.values().contains( o ) );
+		}
+		int h = m.hashCode();
+		/* Now we save and read m. */
+		java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" );
+		java.io.OutputStream os = new java.io.FileOutputStream( ff );
+		java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os );
+		oos.writeObject( m );
+		oos.close();
+		java.io.InputStream is = new java.io.FileInputStream( ff );
+		java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is );
+		m = (Int2IntOpenHashMap)ois.readObject();
+		ois.close();
+		ff.delete();
+		assertEquals( "Error: hashCode() changed after save/read", m.hashCode(), h );
+		/* Now we check that m actually holds that data. */
+		for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on an entry after save/read", valEquals( m.get( o ), t.get( o ) ) );
+		}
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+		for ( int i = 0; i < 20 * n; i++ ) {
+			int T = genKey();
+			int U = genValue();
+			assertTrue( "Error: divergence in put() between t and m after save/read",
+					valEquals( m.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ), t.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ) ) );
+			T = genKey();
+			Integer result;
+			assertTrue( "Error: divergence in remove() between t and m after save/read", valEquals( m.remove( T ), ( result = (Integer)t.remove( ( Integer.valueOf( T ) ) ) ) != null ? result.intValue() : 0 ) );
+		}
+		assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) );
+		/*
+		 * Now we take everything out of m and check that it is empty.
+		 */
+		for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); )
+			m.remove( i.next() );
+		assertTrue( "Error: m is not empty (as it should be)", m.isEmpty() );
+		m = new Int2IntOpenHashMap( n, f );
+		t.clear();
+		for( int i = n; i-- != 0; ) m.put( i, 1 );
+		t.putAll( m );
+		for( int i = n; i-- != 0; ) assertEquals( "Error: m and t differ on a key during torture-test insertion.", m.put( i, 2 ), t.put( Integer.valueOf( i ), 2 ) );	
+		
+		assertTrue( "Error: !m.equals(t) after torture-test insertion", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after torture-test insertion", t.equals( m ) );
+		assertTrue( "Error: !m.equals(m.clone()) after torture-test insertion", m.equals( m.clone() ) );
+		assertTrue( "Error: !m.clone().equals(m) after torture-test insertion", m.clone().equals( m ) );
+		m.trim();
+		assertTrue( "Error: !m.equals(t) after trim()", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after trim()", t.equals( m ) );
+		return;
+	}
+
+	@Test
+	public void test1() throws IOException, ClassNotFoundException {
+		test( 1, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1, Hash.FAST_LOAD_FACTOR );
+		test( 1, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test10() throws IOException, ClassNotFoundException {
+		test( 10, Hash.DEFAULT_LOAD_FACTOR );
+		test( 10, Hash.FAST_LOAD_FACTOR );
+		test( 10, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test100() throws IOException, ClassNotFoundException {
+		test( 100, Hash.DEFAULT_LOAD_FACTOR );
+		test( 100, Hash.FAST_LOAD_FACTOR );
+		test( 100, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Ignore("Too long")
+	@Test
+	public void test1000() throws IOException, ClassNotFoundException {
+		test( 1000, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1000, Hash.FAST_LOAD_FACTOR );
+		test( 1000, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+	
+	@Test
+	public void testAdd() {
+		Int2IntOpenHashMap m = new Int2IntOpenHashMap( Hash.DEFAULT_INITIAL_SIZE );
+		assertEquals( 0, m.add( 0, 2 ) );
+		assertEquals( 2, m.get( 0 ) );
+		assertEquals( 2, m.add( 0, 3 ) );
+		assertEquals( 5, m.get( 0 ) );
+		m.defaultReturnValue( -1 );
+		assertEquals( -1, m.add( 1, 1 ) );
+		assertEquals( 0, m.get( 1 ) );
+		assertEquals( 0, m.add( 1, 1 ) );
+		assertEquals( 1, m.get( 1 ) );
+		assertEquals( 1, m.add( 1, -2 ) );
+		assertEquals( -1, m.get( 1 ) );
+		
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/ints/Int2ObjectLinkedOpenHashMapTest.java b/test/it/unimi/dsi/fastutil/ints/Int2ObjectLinkedOpenHashMapTest.java
new file mode 100644
index 0000000..a8a6561
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/Int2ObjectLinkedOpenHashMapTest.java
@@ -0,0 +1,111 @@
+package it.unimi.dsi.fastutil.ints;
+
+import it.unimi.dsi.fastutil.objects.ObjectIterator;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map.Entry;
+
+import junit.framework.Assert;
+
+import org.junit.Test;
+
+@SuppressWarnings("rawtypes")
+public class Int2ObjectLinkedOpenHashMapTest {
+	private static Int2ObjectLinkedOpenHashMap<Integer> original = new Int2ObjectLinkedOpenHashMap<Integer>();
+	static {
+		original.put( 0, Integer.valueOf( 30303000 ) ); // slot 0
+		original.put( -152, Integer.valueOf( 31313100 ) ); // slot 31
+		original.put( -1520082082, Integer.valueOf( 42 ) ); // slot 31 -> 1
+		original.put( 20, Integer.valueOf( 30303001 ) ); // slot 1 -> 2
+	}
+
+	public void print( Int2ObjectLinkedOpenHashMap<Integer> cache ) {
+		System.err.println( "\nYou should get the following entries in no particular order:\n" + cache + "\n" );
+	}
+
+	/**
+	 * A hash collision (hashcode & mask) at the very end of the underlying array induces a wrap
+	 * around the array, causing a gap of unused entries from index 3 to 30; the cursor of the
+	 * element which should be returned next will already be set to index 2.
+	 */
+
+	@Test
+	public void twiceDoubleCollision() {
+		Int2ObjectLinkedOpenHashMap<Integer> cache = new Int2ObjectLinkedOpenHashMap<Integer>();
+		cache.put( 0, Integer.valueOf( 30303000 ) ); // slot 0
+		cache.put( -152, Integer.valueOf( 31313100 ) ); // slot 31
+		cache.put( -1520082082, Integer.valueOf( 42 ) ); // slot 31 -> 1
+		cache.put( 20, Integer.valueOf( 30303001 ) ); // slot 1 -> 2
+		print( cache );
+		handleCacheIterator( cache.int2ObjectEntrySet().fastIterator() );
+	}
+
+	/**
+	 * A hash collision (hashcode & mask) at the very end of the underlying array induces a wrap
+	 * around the array and causes a gap of unused entries from index 2 to 30; the cursor of the
+	 * element that should be returned next will already be set to index 1.
+	 * 
+	 * Three entries are added that should all land at the same position (index 31), ending up at indices 31, 0 and 1.
+	 */
+	@Test
+	public void tripleCollision() {
+		Int2ObjectLinkedOpenHashMap<Integer> cache = new Int2ObjectLinkedOpenHashMap<Integer>();
+		cache.put( -152, Integer.valueOf( 1 ) ); // slot 31
+		cache.put( -1520082082, Integer.valueOf( 42 ) ); // slot 31 -> 0
+		cache.put( 256740984, Integer.valueOf( 666 ) ); // slot 31 ->1
+		print( cache );
+		handleCacheIterator( cache.int2ObjectEntrySet().fastIterator() );
+	}
+
+	/**
+	 * A hash collision (hashcode & mask) in the middle of the underlying array. This already works
+	 * fine in fastutil 6.3; the test is added just for the sake of completeness.
+	 */
+	@Test
+	public void normalCollision() {
+		Int2ObjectLinkedOpenHashMap<Integer> cache = new Int2ObjectLinkedOpenHashMap<Integer>();
+		cache.put( 19, Integer.valueOf( 1111 ) ); // slot 20
+		cache.put( 55, Integer.valueOf( 2222 ) ); // slot 20 ->21
+		print( cache );
+		handleCacheIterator( cache.int2ObjectEntrySet().fastIterator() );
+	}
+
+	public void handleCacheIterator( ObjectIterator<it.unimi.dsi.fastutil.ints.Int2ObjectMap.Entry<Integer>> iterator ) {
+
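+		// Drains the (fast) iterator while removing every entry, recording keys and values both before
+		// and after remove(); the lists printed at the end make it easy to spot entries whose state is
+		// invalidated by the removal.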
+		final List<Integer> finishedEntries = new ArrayList<Integer>();
+		final List<Integer> finishedEntriesAfterRemove = new ArrayList<Integer>();
+		final List<Integer> finishedEntryKey = new ArrayList<Integer>();
+		final List<Integer> finishedEntryKeyAfter = new ArrayList<Integer>();
+		int i = 0;
+		while ( iterator.hasNext() ) {
+			it.unimi.dsi.fastutil.ints.Int2ObjectMap.Entry<Integer> next = iterator.next();
+			System.err.println( "element returned at position " + ( i++ ) + ": " + next );
+			final Integer value = next.getValue();
+			Assert.assertNotNull( value );
+			finishedEntryKey.add( next.getKey() );
+			iterator.remove();
+			finishedEntryKeyAfter.add( next.getKey() );
+			finishedEntries.add( value );
+			finishedEntriesAfterRemove.add( next.getValue() );
+
+		}
+		System.err.println( "\nkeys if retrieved before remove():\n" + finishedEntryKey );
+		System.err.println( "and the values :\n" + finishedEntries );
+		System.err.println( "\nkeys if retrieved after remove():\n" + finishedEntryKeyAfter );
+		System.err.println( "and the values :\n" + finishedEntriesAfterRemove );
+	}
+
+	@Test(expected=ArrayIndexOutOfBoundsException.class)
+	public void testRemovedEntry() {
+		final Int2ObjectLinkedOpenHashMap<Integer> map = new Int2ObjectLinkedOpenHashMap<Integer>();
+		map.put( 1, Integer.valueOf( 0 ) );
+		map.put( 2, Integer.valueOf( 1 ) );
+		final ObjectIterator<Entry<Integer, Integer>> iterator = map.entrySet().iterator();
+		final Entry e = iterator.next();
+		iterator.remove();
+		e.getKey();
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/ints/Int2ObjectOpenHashMapTest.java b/test/it/unimi/dsi/fastutil/ints/Int2ObjectOpenHashMapTest.java
new file mode 100644
index 0000000..47a144a
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/Int2ObjectOpenHashMapTest.java
@@ -0,0 +1,111 @@
+package it.unimi.dsi.fastutil.ints;
+
+import it.unimi.dsi.fastutil.objects.ObjectIterator;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map.Entry;
+
+import junit.framework.Assert;
+
+import org.junit.Test;
+
+@SuppressWarnings("rawtypes")
+public class Int2ObjectOpenHashMapTest {
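+	/* Mirrors Int2ObjectLinkedOpenHashMapTest for the non-linked map; see that class for a note on
+	 * how the "slot" annotations in the comments below should be read. */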
+	private static Int2ObjectOpenHashMap<Integer> original = new Int2ObjectOpenHashMap<Integer>();
+	static {
+		original.put( 0, Integer.valueOf( 30303000 ) ); // slot 0
+		original.put( -152, Integer.valueOf( 31313100 ) ); // slot 31
+		original.put( -1520082082, Integer.valueOf( 42 ) ); // slot 31 -> 1
+		original.put( 20, Integer.valueOf( 30303001 ) ); // slot 1 -> 2
+	}
+
+	public void print( Int2ObjectOpenHashMap<Integer> cache ) {
+		System.err.println( "\nYou should get the following entries in no particular order:\n" + cache + "\n" );
+	}
+
+	/**
+	 * A hash collision (hashcode & mask) at the very end of the underlying array induces a wrap
+	 * around the array and causes a gap of unused entries from index 3 to 30; the cursor of the
+	 * element that should be returned next will already be set to index 2.
+	 */
+
+	@Test
+	public void twiceDoubleCollision() {
+		Int2ObjectOpenHashMap<Integer> cache = new Int2ObjectOpenHashMap<Integer>();
+		cache.put( 0, Integer.valueOf( 30303000 ) ); // slot 0
+		cache.put( -152, Integer.valueOf( 31313100 ) ); // slot 31
+		cache.put( -1520082082, Integer.valueOf( 42 ) ); // slot 31 -> 1
+		cache.put( 20, Integer.valueOf( 30303001 ) ); // slot 1 -> 2
+		print( cache );
+		handleCacheIterator( cache.int2ObjectEntrySet().fastIterator() );
+	}
+
+	/**
+	 * A hash collision (hashcode & mask) at the very end of the underlying array induces a wrap
+	 * around the array and causes a gap of unused entries from index 2 to 30; the cursor of the
+	 * element that should be returned next will already be set to index 1.
+	 * 
+	 * Three entries are added that should all land at the same position (index 31), ending up at indices 31, 0 and 1.
+	 */
+	@Test
+	public void tripleCollision() {
+		Int2ObjectOpenHashMap<Integer> cache = new Int2ObjectOpenHashMap<Integer>();
+		cache.put( -152, Integer.valueOf( 1 ) ); // slot 31
+		cache.put( -1520082082, Integer.valueOf( 42 ) ); // slot 31 -> 0
+		cache.put( 256740984, Integer.valueOf( 666 ) ); // slot 31 ->1
+		print( cache );
+		handleCacheIterator( cache.int2ObjectEntrySet().fastIterator() );
+	}
+
+	/**
+	 * A hash collision (hashcode & mask) in the middle of the underlying array. This already works
+	 * fine in fastutil 6.3; the test is added just for the sake of completeness.
+	 */
+	@Test
+	public void normalCollision() {
+		Int2ObjectOpenHashMap<Integer> cache = new Int2ObjectOpenHashMap<Integer>();
+		cache.put( 19, Integer.valueOf( 1111 ) ); // slot 20
+		cache.put( 55, Integer.valueOf( 2222 ) ); // slot 20 ->21
+		print( cache );
+		handleCacheIterator( cache.int2ObjectEntrySet().fastIterator() );
+	}
+
+	public void handleCacheIterator( ObjectIterator<it.unimi.dsi.fastutil.ints.Int2ObjectMap.Entry<Integer>> iterator ) {
+
+		final List<Integer> finishedEntries = new ArrayList<Integer>();
+		final List<Integer> finishedEntriesAfterRemove = new ArrayList<Integer>();
+		final List<Integer> finishedEntryKey = new ArrayList<Integer>();
+		final List<Integer> finishedEntryKeyAfter = new ArrayList<Integer>();
+		int i = 0;
+		while ( iterator.hasNext() ) {
+			it.unimi.dsi.fastutil.ints.Int2ObjectMap.Entry<Integer> next = iterator.next();
+			System.err.println( "element returned at position " + ( i++ ) + ": " + next );
+			final Integer value = next.getValue();
+			Assert.assertNotNull( value );
+			finishedEntryKey.add( next.getKey() );
+			iterator.remove();
+			finishedEntryKeyAfter.add( next.getKey() );
+			finishedEntries.add( value );
+			finishedEntriesAfterRemove.add( next.getValue() );
+
+		}
+		System.err.println( "\nkeys if retrieved before remove():\n" + finishedEntryKey );
+		System.err.println( "and the values :\n" + finishedEntries );
+		System.err.println( "\nkeys if retrieved after remove():\n" + finishedEntryKeyAfter );
+		System.err.println( "and the values :\n" + finishedEntriesAfterRemove );
+	}
+	
+	@Test(expected=ArrayIndexOutOfBoundsException.class)
+	public void testRemovedEntry() {
+		final Int2ObjectOpenHashMap<Integer> map = new Int2ObjectOpenHashMap<Integer>();
+		map.put( 1, Integer.valueOf( 0 ) );
+		map.put( 2, Integer.valueOf( 1 ) );
+		final ObjectIterator<Entry<Integer, Integer>> iterator = map.entrySet().iterator();
+		final Entry e = iterator.next();
+		iterator.remove();
+		e.getKey();
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/ints/IntArrayFIFOQueueTest.java b/test/it/unimi/dsi/fastutil/ints/IntArrayFIFOQueueTest.java
new file mode 100644
index 0000000..411724f
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/IntArrayFIFOQueueTest.java
@@ -0,0 +1,147 @@
+package it.unimi.dsi.fastutil.ints;
+
+import static org.junit.Assert.assertEquals;
+
+import org.junit.Test;
+
+public class IntArrayFIFOQueueTest {
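+	// IntArrayFIFOQueue is presumably backed by a growable circular array; the tests below use
+	// initial capacities that are smaller than, comparable to, and larger than the number of
+	// enqueued elements so that both the growth path and the wrap-around path are exercised.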
+	
+	@Test
+	public void testEnqueueDequeue() {
+		IntArrayFIFOQueue q = new IntArrayFIFOQueue();
+		for( int i = 0; i < 100; i++ ) {
+			q.enqueue( i );
+			assertEquals( i, q.lastInt() );
+		}
+		for( int i = 0; i < 100; i++ ) {
+			assertEquals( i, q.firstInt() );
+			assertEquals( i, q.dequeueInt() );
+			if ( i != 99 ) assertEquals( 99, q.lastInt() );
+		}
+
+		q = new IntArrayFIFOQueue( 10 );
+		for( int i = 0; i < 100; i++ ) {
+			q.enqueue( i );
+			assertEquals( i, q.lastInt() );
+		}
+		for( int i = 0; i < 100; i++ ) {
+			assertEquals( i, q.firstInt() );
+			assertEquals( i, q.dequeueInt() );
+			if ( i != 99 ) assertEquals( 99, q.lastInt() );
+		}
+
+		q = new IntArrayFIFOQueue( 200 );
+		for( int i = 0; i < 100; i++ ) {
+			q.enqueue( i );
+			assertEquals( i, q.lastInt() );
+		}
+		for( int i = 0; i < 100; i++ ) {
+			assertEquals( i, q.firstInt() );
+			assertEquals( i, q.dequeueInt() );
+			if ( i != 99 ) assertEquals( 99, q.lastInt() );
+		}
+	}
+
+	@Test
+	public void testMix() {
+		IntArrayFIFOQueue q = new IntArrayFIFOQueue();
+		for( int i = 0, p = 0; i < 200; i++ ) {
+			for( int j = 0; j < 20; j++ ) {
+				q.enqueue( j + i * 20 );
+				assertEquals( j + i * 20, q.lastInt() );
+			}
+			for( int j = 0; j < 10; j++ ) assertEquals( p++, q.dequeueInt() );
+		}
+		
+		q = new IntArrayFIFOQueue( 10 );
+		for( int i = 0, p = 0; i < 200; i++ ) {
+			for( int j = 0; j < 20; j++ ) {
+				q.enqueue( j + i * 20 );
+				assertEquals( j + i * 20, q.lastInt() );
+			}
+			for( int j = 0; j < 10; j++ ) assertEquals( p++, q.dequeueInt() );
+		}
+
+		q = new IntArrayFIFOQueue( 200 );
+		for( int i = 0, p = 0; i < 200; i++ ) {
+			for( int j = 0; j < 20; j++ ) {
+				q.enqueue( j + i * 20 );
+				assertEquals( j + i * 20, q.lastInt() );
+			}
+			for( int j = 0; j < 10; j++ ) assertEquals( p++, q.dequeueInt() );
+		}
+	}
+
+	@Test
+	public void testWrap() {
+		IntArrayFIFOQueue q = new IntArrayFIFOQueue( 30 );
+		for( int i = 0; i < 20; i++ ) {
+			q.enqueue( i );
+			assertEquals( i, q.lastInt() );
+		}
+		for( int j = 0; j < 10; j++ ) assertEquals( j, q.dequeueInt() );
+		for( int i = 0; i < 15; i++ ) {
+			q.enqueue( i );
+			assertEquals( i, q.lastInt() );
+		}
+		for( int j = 10; j < 20; j++ ) assertEquals( j, q.dequeueInt() );
+		for( int j = 0; j < 15; j++ ) assertEquals( j, q.dequeueInt() );
+	}
+
+	@Test
+	public void testTrim() {
+		IntArrayFIFOQueue q = new IntArrayFIFOQueue( 30 );
+		for( int j = 0; j < 20; j++ ) q.enqueue( j );
+		for( int j = 0; j < 10; j++ ) assertEquals( j, q.dequeueInt() );
+		for( int j = 0; j < 15; j++ ) q.enqueue( j );
+		
+		q.trim();
+		for( int j = 10; j < 20; j++ ) assertEquals( j, q.dequeueInt() );
+		for( int j = 0; j < 15; j++ ) assertEquals( j, q.dequeueInt() );
+		
+		q = new IntArrayFIFOQueue( 30 );
+		for( int j = 0; j < 20; j++ ) q.enqueue( j );
+		q.trim();
+		for( int j = 0; j < 20; j++ ) assertEquals( j, q.dequeueInt() );
+	}
+
+	@Test
+	public void testDeque() {
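+		// enqueueFirst() adds at the head and dequeueLastInt() removes from the tail, so the queue is
+		// exercised as a double-ended queue; the small initial capacity of 4 presumably forces the
+		// head and tail indices to wrap around the backing array several times.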
+		IntArrayFIFOQueue q = new IntArrayFIFOQueue( 4 );
+		q.enqueue( 0 );
+		q.enqueue( 1 );
+		q.enqueue( 2 );
+		assertEquals( q.dequeueInt(), 0 );
+		assertEquals( q.dequeueInt(), 1 );
+		q.enqueue( 3 );
+		assertEquals( q.dequeueLastInt(), 3 );
+		assertEquals( q.dequeueLastInt(), 2 );
+		q.enqueueFirst( 1 );
+		q.enqueueFirst( 0 );
+		assertEquals( 0, q.dequeueInt() );
+		assertEquals( 1, q.dequeueInt() );
+
+		q = new IntArrayFIFOQueue( 4 );
+		q.enqueueFirst( 0 );
+		q.enqueueFirst( 1 );
+		assertEquals( 1, q.dequeueInt() );
+		assertEquals( 0, q.dequeueInt() );
+		q.enqueueFirst( 0 );
+		q.enqueueFirst( 1 );
+		q.enqueueFirst( 2 );
+		q.enqueueFirst( 3 );
+		assertEquals( 3, q.dequeueInt() );
+		assertEquals( 2, q.dequeueInt() );
+		assertEquals( 1, q.dequeueInt() );
+		assertEquals( 0, q.dequeueInt() );
+	}
+
+	@Test
+	public void testImmediateReduce() {
+		IntArrayFIFOQueue q = new IntArrayFIFOQueue();
+		q.enqueue( 0 );
+		q.dequeue();
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/ints/IntArrayFrontCodedListTest.java b/test/it/unimi/dsi/fastutil/ints/IntArrayFrontCodedListTest.java
new file mode 100644
index 0000000..eb33ba0
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/IntArrayFrontCodedListTest.java
@@ -0,0 +1,125 @@
+package it.unimi.dsi.fastutil.ints;
+
+import it.unimi.dsi.fastutil.objects.ObjectListIterator;
+
+import java.io.IOException;
+
+import static org.junit.Assert.*;
+import org.junit.Test;
+
+@SuppressWarnings({ "rawtypes", "unchecked" })
+public class IntArrayFrontCodedListTest {
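+	/*
+	 * IntArrayFrontCodedList stores a list of int arrays in front-coded form: roughly speaking, each
+	 * array records only the length of the prefix it shares with the previous array plus its own
+	 * remaining suffix, with a full copy kept every "ratio" elements (the second constructor
+	 * argument used below). The tests here only exercise reading, iteration, cloning and serialization.
+	 */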
+
+
+	private static java.util.Random r = new java.util.Random( 0 );
+
+	private static int genKey() {
+		return r.nextInt();
+	}
+
+	private static boolean contentEquals( java.util.List x, java.util.List y ) {
+		if ( x.size() != y.size() ) return false;
+		for ( int i = 0; i < x.size(); i++ )
+			if ( !java.util.Arrays.equals( (int[])x.get( i ), (int[])y.get( i ) ) ) return false;
+		return true;
+	}
+
+	private static int l[];
+
+	private static int[][] a;
+
+	private static void test( int n ) throws IOException, ClassNotFoundException {
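+		// Builds n random int arrays whose lengths follow |N(0,1)|*32, wraps them both in an
+		// IntArrayFrontCodedList (with a random ratio between 1 and 4) and in a plain ObjectArrayList
+		// used as a reference, then checks element-by-element equality, iterator parity from random
+		// starting points, and a serialization round-trip through a temporary file.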
+		l = new int[ n ];
+		a = new int[ n ][];
+		for ( int i = 0; i < n; i++ )
+			l[ i ] = (int)( Math.abs( r.nextGaussian() ) * 32 );
+		for ( int i = 0; i < n; i++ )
+			a[ i ] = new int[ l[ i ] ];
+		for ( int i = 0; i < n; i++ )
+			for ( int j = 0; j < l[ i ]; j++ )
+				a[ i ][ j ] = genKey();
+		IntArrayFrontCodedList m = new IntArrayFrontCodedList( it.unimi.dsi.fastutil.objects.ObjectIterators.wrap( a ), r.nextInt( 4 ) + 1 );
+		it.unimi.dsi.fastutil.objects.ObjectArrayList t = new it.unimi.dsi.fastutil.objects.ObjectArrayList( a );
+		// System.out.println(m);
+		// for( i = 0; i < t.size(); i++ )
+		// System.out.println(ARRAY_LIST.wrap((KEY_TYPE[])t.get(i)));
+		/* Now we check that m actually holds that data. */
+		assertTrue( "Error: m does not equal t at creation", contentEquals( m, t ) );
+		/* Now we check cloning. */
+		assertTrue( "Error: m does not equal m.clone()", contentEquals( m, m.clone() ) );
+		/* Now we play with iterators. */
+		{
+			ObjectListIterator i;
+			java.util.ListIterator j;
+			i = m.listIterator();
+			j = t.listIterator();
+			for ( int k = 0; k < 2 * n; k++ ) {
+				assertTrue( "Error: divergence in hasNext()", i.hasNext() == j.hasNext() );
+				assertTrue( "Error: divergence in hasPrevious()", i.hasPrevious() == j.hasPrevious() );
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					assertTrue( "Error: divergence in next()", java.util.Arrays.equals( (int[])i.next(), (int[])j.next() ) );
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					assertTrue( "Error: divergence in previous()", java.util.Arrays.equals( (int[])i.previous(), (int[])j.previous() ) );
+				}
+				assertTrue( "Error: divergence in nextIndex()", i.nextIndex() == j.nextIndex() );
+				assertTrue( "Error: divergence in previousIndex()", i.previousIndex() == j.previousIndex() );
+			}
+		}
+		{
+			int from = r.nextInt( m.size() + 1 );
+			ObjectListIterator i;
+			java.util.ListIterator j;
+			i = m.listIterator( from );
+			j = t.listIterator( from );
+			for ( int k = 0; k < 2 * n; k++ ) {
+				assertTrue( "Error: divergence in hasNext() (iterator with starting point " + from + ")", i.hasNext() == j.hasNext() );
+				assertTrue( "Error: divergence in hasPrevious() (iterator with starting point " + from + ")", i.hasPrevious() == j.hasPrevious() );
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					assertTrue( "Error: divergence in next() (iterator with starting point " + from + ")", java.util.Arrays.equals( (int[])i.next(), (int[])j.next() ) );
+					// System.err.println("Done next " + I + " " + J + "  " + badPrevious);
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					assertTrue( "Error: divergence in previous() (iterator with starting point " + from + ")", java.util.Arrays.equals( (int[])i.previous(), (int[])j.previous() ) );
+				}
+			}
+		}
+		java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" );
+		java.io.OutputStream os = new java.io.FileOutputStream( ff );
+		java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os );
+		oos.writeObject( m );
+		oos.close();
+		java.io.InputStream is = new java.io.FileInputStream( ff );
+		java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is );
+		m = (IntArrayFrontCodedList)ois.readObject();
+		ois.close();
+		ff.delete();
+		assertTrue( "Error: m does not equal t after save/read", contentEquals( m, t ) );
+		return;
+	}
+
+	@Test
+	public void test1() throws IOException, ClassNotFoundException {
+		test( 1 );
+	}
+
+	@Test
+	public void test10() throws IOException, ClassNotFoundException {
+		test( 10 );
+	}
+
+	@Test
+	public void test100() throws IOException, ClassNotFoundException {
+		test( 100 );
+	}
+
+	@Test
+	public void test1000() throws IOException, ClassNotFoundException {
+		test( 1000 );
+	}
+
+	@Test
+	public void test10000() throws IOException, ClassNotFoundException {
+		test( 10000 );
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/ints/IntArrayIndirectPriorityQueueTest.java b/test/it/unimi/dsi/fastutil/ints/IntArrayIndirectPriorityQueueTest.java
new file mode 100644
index 0000000..aa5a8ec
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/IntArrayIndirectPriorityQueueTest.java
@@ -0,0 +1,344 @@
+package it.unimi.dsi.fastutil.ints;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.util.Random;
+
+import org.junit.Test;
+
+public class IntArrayIndirectPriorityQueueTest {
+
+	@Test
+	public void testFront() {
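+		// front( tops ) fills tops with the indices of all elements that are tied for the smallest
+		// reference value and returns how many there are: refArray contains five 0s (at indices 4, 8,
+		// 11, 13 and 14), so the first call is expected to report exactly those five positions.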
+		int refArray[] = { 4, 3, 2, 1, 0, 3, 2, 1, 0, 2, 1, 0, 1, 0, 0 };
+		int tops[] = new int[ refArray.length ];
+		final IntArrayIndirectPriorityQueue queue = new IntArrayIndirectPriorityQueue( refArray );
+		for ( int i = refArray.length; i-- != 0; )
+			queue.enqueue( i );
+
+		assertEquals( 5, queue.front( tops ) );
+		assertEquals( new IntOpenHashSet( new int[] { 4, 8, 11, 13, 14 } ), new IntOpenHashSet( tops, 0, 5 ) );
+		for ( int i = 4; i-- != 0; ) {
+			queue.dequeue();
+			assertEquals( i + 1, queue.front( tops ) );
+		}
+		queue.dequeue();
+
+		assertEquals( 4, queue.front( tops ) );
+		assertEquals( new IntOpenHashSet( new int[] { 3, 7, 10, 12 } ), new IntOpenHashSet( tops, 0, 4 ) );
+		for ( int i = 3; i-- != 0; ) {
+			queue.dequeue();
+			assertEquals( i + 1, queue.front( tops ) );
+		}
+		queue.dequeue();
+
+		assertEquals( 3, queue.front( tops ) );
+		assertEquals( new IntOpenHashSet( new int[] { 2, 6, 9 } ), new IntOpenHashSet( tops, 0, 3 ) );
+		for ( int i = 2; i-- != 0; ) {
+			queue.dequeue();
+			assertEquals( i + 1, queue.front( tops ) );
+		}
+		queue.dequeue();
+
+		assertEquals( 2, queue.front( tops ) );
+		assertEquals( new IntOpenHashSet( new int[] { 1, 5 } ), new IntOpenHashSet( tops, 0, 2 ) );
+		queue.dequeue();
+		assertEquals( 1, queue.front( tops ) );
+		queue.dequeue();
+
+		assertEquals( 1, queue.front( tops ) );
+	}
+
+
+	private int[] ref;
+
+	private boolean heapEqual( int[] a, int[] b, int sizea, int sizeb ) {
+		if ( sizea != sizeb ) return false;
+		int[] aa = new int[ sizea ];
+		int[] bb = new int[ sizea ];
+		for ( int i = 0; i < sizea; i++ ) {
+			aa[ i ] = ref[ a[ i ] ];
+			bb[ i ] = ref[ b[ i ] ];
+		}
+		java.util.Arrays.sort( aa );
+		java.util.Arrays.sort( bb );
+		while ( sizea-- != 0 )
+			if ( !( ( aa[ sizea ] ) == ( bb[ sizea ] ) ) ) return false;
+		return true;
+	}
+
+	public void test( int n ) {
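+		// Differential test: the array-based queue m is driven in lockstep with a heap-based
+		// IntHeapIndirectPriorityQueue t used as a reference; after every enqueue/dequeue/remove/
+		// changed operation the two are compared element-wise (via heapEqual) and any exception
+		// thrown by one must be matched by the other.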
+		Exception mThrowsIllegal, tThrowsIllegal, mThrowsOutOfBounds, tThrowsOutOfBounds, mThrowsNoElement, tThrowsNoElement;
+		int rm = 0, rt = 0;
+		Random r = new Random( 0 );
+		ref = new int[ n ];
+
+		for ( int i = 0; i < n; i++ ) ref[ i ] = r.nextInt();
+
+		IntArrayIndirectPriorityQueue m = new IntArrayIndirectPriorityQueue( ref );
+		IntHeapIndirectPriorityQueue t = new IntHeapIndirectPriorityQueue( ref );
+
+		/* We add pairs to t. */
+		for ( int i = 0; i < n / 2; i++ ) {
+			t.enqueue( i );
+			m.enqueue( i );
+		}
+
+		assertTrue( "Error: m and t differ after creation (" + m + ", " + t + ")", heapEqual( m.array, t.heap, m.size(), t.size() ) );
+
+		/* Now we add and remove random data in m and t, checking that the result is the same. */
+
+		for ( int i = 0; i < 2 * n; i++ ) {
+			if ( r.nextDouble() < 0.01 ) {
+				t.clear();
+				m.clear();
+				for ( int j = 0; j < n / 2; j++ ) {
+					t.enqueue( j );
+					m.enqueue( j );
+				}
+			}
+
+			int T = r.nextInt( 2 * n );
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			try {
+				t.enqueue( T );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			catch ( IllegalArgumentException e ) {
+				tThrowsIllegal = e;
+			}
+
+			if ( tThrowsIllegal == null ) { // To skip duplicates
+				try {
+					m.enqueue( T );
+				}
+				catch ( IndexOutOfBoundsException e ) {
+					mThrowsOutOfBounds = e;
+				}
+				catch ( IllegalArgumentException e ) {
+					mThrowsIllegal = e;
+				}
+			}
+
+			mThrowsIllegal = tThrowsIllegal = null; // To skip duplicates
+
+			assertTrue( "Error: enqueue() divergence in IndexOutOfBoundsException for " + T + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
+					( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			assertTrue( "Error: enqueue() divergence in IllegalArgumentException for " + T + " (" + mThrowsIllegal + ", " + tThrowsIllegal + ")",
+					( mThrowsIllegal == null ) == ( tThrowsIllegal == null ) );
+
+			assertTrue( "Error: m and t differ after enqueue (" + m + ", " + t + ")", heapEqual( m.array, t.heap, m.size(), t.size() ) );
+
+			if ( m.size() != 0 ) {
+				assertTrue( "Error: m and t differ in first element after enqueue (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")",
+						( ( ref[ m.first() ] ) == ( ref[ t.first() ] ) ) );
+			}
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			try {
+				rm = m.dequeue();
+				while ( !m.isEmpty() && ( ( ref[ m.first() ] ) == ( ref[ rm ] ) ) )	m.dequeue();
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			catch ( IllegalArgumentException e ) {
+				mThrowsIllegal = e;
+			}
+			catch ( java.util.NoSuchElementException e ) {
+				mThrowsNoElement = e;
+			}
+
+			try {
+				rt = t.dequeue();
+				while ( !t.isEmpty() && ( ( ref[ t.first() ] ) == ( ref[ rt ] ) ) )
+					t.dequeue();
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			catch ( IllegalArgumentException e ) {
+				tThrowsIllegal = e;
+			}
+			catch ( java.util.NoSuchElementException e ) {
+				tThrowsNoElement = e;
+			}
+
+			assertTrue( "Error: dequeue() divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
+					( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			assertTrue( "Error: dequeue() divergence in IllegalArgumentException  (" + mThrowsIllegal + ", " + tThrowsIllegal + ")", ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ) );
+			assertTrue( "Error: dequeue() divergence in java.util.NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")",
+					( mThrowsNoElement == null ) == ( tThrowsNoElement == null ) );
+			if ( mThrowsOutOfBounds == null ) assertTrue( "Error: divergence in dequeue() between m and t (" + rm + "->" + ref[ rm ] + ", " + rt + "->" + ref[ rt ] + ")",
+					( ( ref[ rt ] ) == ( ref[ rm ] ) ) );
+
+
+			assertTrue( "Error: m and t differ after dequeue (" + m + ", " + t + ")", heapEqual( m.array, t.heap, m.size(), t.size() ) );
+
+			if ( m.size() != 0 ) {
+				assertTrue( "Error: m and t differ in first element after dequeue (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")",
+						( ( ref[ m.first() ] ) == ( ref[ t.first() ] ) ) );
+			}
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+
+			int pos = r.nextInt( n * 2 );
+
+			try {
+				m.remove( pos );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			catch ( IllegalArgumentException e ) {
+				mThrowsIllegal = e;
+			}
+			catch ( java.util.NoSuchElementException e ) {
+				mThrowsNoElement = e;
+			}
+
+			try {
+				t.remove( pos );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			catch ( IllegalArgumentException e ) {
+				tThrowsIllegal = e;
+			}
+			catch ( java.util.NoSuchElementException e ) {
+				tThrowsNoElement = e;
+			}
+
+			assertTrue( "Error: remove(int) divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
+					( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			assertTrue( "Error: remove(int) divergence in IllegalArgumentException  (" + mThrowsIllegal + ", " + tThrowsIllegal + ")", ( mThrowsIllegal == null ) == ( tThrowsIllegal == null ) );
+			assertTrue( "Error: remove(int) divergence in java.util.NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")",
+					( mThrowsNoElement == null ) == ( tThrowsNoElement == null ) );
+
+			assertTrue( "Error: m and t differ after remove(int) (" + m + ", " + t + ")", heapEqual( m.array, t.heap, m.size(), t.size() ) );
+
+			if ( m.size() != 0 ) {
+				assertTrue( "Error: m and t differ in first element after remove(int) (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")",
+						( ( ref[ m.first() ] ) == ( ref[ t.first() ] ) ) );
+			}
+
+
+			mThrowsNoElement = tThrowsNoElement = mThrowsOutOfBounds = tThrowsOutOfBounds = mThrowsIllegal = tThrowsIllegal = null;
+
+			pos = r.nextInt( n );
+
+			try {
+				t.changed( pos );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			catch ( IllegalArgumentException e ) {
+				tThrowsIllegal = e;
+			}
+			catch ( java.util.NoSuchElementException e ) {
+				tThrowsNoElement = e;
+			}
+
+			if ( tThrowsIllegal == null ) {
+				try {
+					m.changed( pos );
+				}
+				catch ( IndexOutOfBoundsException e ) {
+					mThrowsOutOfBounds = e;
+				}
+				catch ( IllegalArgumentException e ) {
+					mThrowsIllegal = e;
+				}
+				catch ( java.util.NoSuchElementException e ) {
+					mThrowsNoElement = e;
+				}
+			}
+
+			assertTrue( "Error: change(int) divergence in IndexOutOfBoundsException (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
+					( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			// assertTrue( "Error: change(int) divergence in IllegalArgumentException  (" +
+			// mThrowsIllegal + ", " + tThrowsIllegal + ")" , ( mThrowsIllegal == null ) == (
+			// tThrowsIllegal == null ) );
+			assertTrue( "Error: change(int) divergence in java.util.NoSuchElementException  (" + mThrowsNoElement + ", " + tThrowsNoElement + ")",
+					( mThrowsNoElement == null ) == ( tThrowsNoElement == null ) );
+
+			assertTrue( "Error: m and t differ after change(int) (" + m + ", " + t + ")", heapEqual( m.array, t.heap, m.size(), t.size() ) );
+
+			if ( m.size() != 0 ) {
+				assertTrue( "Error: m and t differ in first element after change(int) (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")",
+						( ( ref[ m.first() ] ) == ( ref[ t.first() ] ) ) );
+			}
+
+			int[] temp = (int[])t.heap.clone();
+			java.util.Arrays.sort( temp, 0, t.size() ); // To scramble a bit
+			m = new IntArrayIndirectPriorityQueue( m.refArray, temp, t.size() );
+
+			assertTrue( "Error: m and t differ after wrap (" + m + ", " + t + ")", heapEqual( m.array, t.heap, m.size(), t.size() ) );
+
+			if ( m.size() != 0 ) {
+				assertTrue( "Error: m and t differ in first element after wrap (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")",
+						( ( ref[ m.first() ] ) == ( ref[ t.first() ] ) ) );
+			}
+
+			if ( m.size() != 0 && ( ( new it.unimi.dsi.fastutil.ints.IntOpenHashSet( m.array, 0, m.size ) ).size() == m.size() ) ) {
+
+				int first = m.first();
+				ref[ first ] = r.nextInt();
+
+				// System.err.println("Pre-change m: " +m );
+				// System.err.println("Pre-change t: " +t );
+				m.changed();
+				t.changed( first );
+
+				// System.err.println("Post-change m: " +m );
+				// System.err.println("Post-change t: " +t );
+
+				assertTrue( "Error: m and t differ after change (" + m + ", " + t + ")", heapEqual( m.array, t.heap, m.size(), t.size() ) );
+
+				if ( m.size() != 0 ) {
+					assertTrue( "Error: m and t differ in first element after change (" + m.first() + "->" + ref[ m.first() ] + ", " + t.first() + "->" + ref[ t.first() ] + ")",
+							( ( ref[ m.first() ] ) == ( ref[ t.first() ] ) ) );
+				}
+			}
+		}
+
+
+		/* Now we check that m actually holds the same data. */
+
+		m.clear();
+		assertTrue( "Error: m is not empty after clear()", m.isEmpty() );
+	}
+
+
+	@Test
+	public void test1() {
+		test( 1 );
+
+	}
+
+	@Test
+	public void test10() {
+		test( 10 );
+
+	}
+
+	@Test
+	public void test100() {
+		test( 100 );
+	}
+
+	@Test
+	public void test1000() {
+		test( 1000 );
+	}
+
+}
diff --git a/test/it/unimi/dsi/fastutil/ints/IntArrayPriorityQueueTest.java b/test/it/unimi/dsi/fastutil/ints/IntArrayPriorityQueueTest.java
new file mode 100644
index 0000000..618ec40
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/IntArrayPriorityQueueTest.java
@@ -0,0 +1,149 @@
+package it.unimi.dsi.fastutil.ints;
+
+import static org.junit.Assert.assertEquals;
+
+import org.junit.Test;
+
+public class IntArrayPriorityQueueTest {
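+	// IntArrayPriorityQueue is compared against IntHeapPriorityQueue, used as a reference
+	// implementation: for the same sequence of enqueues the two must agree on first() and on the
+	// order in which elements are dequeued, both with the natural order and with
+	// IntComparators.OPPOSITE_COMPARATOR.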
+	
+	@Test
+	public void testEnqueueDequeue() {
+		IntArrayPriorityQueue q = new IntArrayPriorityQueue();
+		IntHeapPriorityQueue h = new IntHeapPriorityQueue();
+		for( int i = 0; i < 100; i++ ) {
+			q.enqueue( i );
+			h.enqueue( i );
+		}
+		for( int i = 0; i < 100; i++ ) {
+			assertEquals( h.first(), q.first() );
+			assertEquals( h.dequeue(), q.dequeue() );
+		}
+
+		q = new IntArrayPriorityQueue( 10 );
+		h.clear();
+		for( int i = 0; i < 100; i++ ) {
+			q.enqueue( i );
+			h.enqueue( i );
+		}
+		for( int i = 0; i < 100; i++ ) {
+			assertEquals( h.first(), q.first() );
+			assertEquals( h.dequeue(), q.dequeue() );
+		}
+
+		q = new IntArrayPriorityQueue( 200 );
+		h.clear();
+		for( int i = 0; i < 100; i++ ) {
+			q.enqueue( i );
+			h.enqueue( i );
+		}
+		for( int i = 0; i < 100; i++ ) {
+			assertEquals( h.first(), q.first() );
+			assertEquals( h.dequeue(), q.dequeue() );
+		}
+	}
+
+
+	@Test
+	public void testEnqueueDequeueComp() {
+		IntArrayPriorityQueue q = new IntArrayPriorityQueue( IntComparators.OPPOSITE_COMPARATOR );
+		IntHeapPriorityQueue h = new IntHeapPriorityQueue( IntComparators.OPPOSITE_COMPARATOR );
+		for( int i = 0; i < 100; i++ ) {
+			q.enqueue( i );
+			h.enqueue( i );
+		}
+		for( int i = 0; i < 100; i++ ) {
+			assertEquals( h.first(), q.first() );
+			assertEquals( h.dequeue(), q.dequeue() );
+		}
+
+		q = new IntArrayPriorityQueue( 10, IntComparators.OPPOSITE_COMPARATOR );
+		h.clear();
+		for( int i = 0; i < 100; i++ ) {
+			q.enqueue( i );
+			h.enqueue( i );
+		}
+		for( int i = 0; i < 100; i++ ) {
+			assertEquals( h.first(), q.first() );
+			assertEquals( h.dequeue(), q.dequeue() );
+		}
+
+		q = new IntArrayPriorityQueue( 200, IntComparators.OPPOSITE_COMPARATOR );
+		h.clear();
+		for( int i = 0; i < 100; i++ ) {
+			q.enqueue( i );
+			h.enqueue( i );
+		}
+		for( int i = 0; i < 100; i++ ) {
+			assertEquals( h.first(), q.first() );
+			assertEquals( h.dequeue(), q.dequeue() );
+		}
+	}
+	
+	@Test
+	public void testMix() {
+		IntArrayPriorityQueue q = new IntArrayPriorityQueue();
+		IntHeapPriorityQueue h = new IntHeapPriorityQueue();
+		for( int i = 0; i < 200; i++ ) {
+			for( int j = 0; j < 20; j++ ) {
+				q.enqueue( j + i * 20 );
+				h.enqueue( j + i * 20 );
+			}
+			for( int j = 0; j < 10; j++ ) assertEquals( h.dequeueInt(), q.dequeueInt() );
+		}
+		
+		q = new IntArrayPriorityQueue( 10 );
+		h = new IntHeapPriorityQueue();
+		for( int i = 0; i < 200; i++ ) {
+			for( int j = 0; j < 20; j++ ) {
+				q.enqueue( j + i * -20 );
+				h.enqueue( j + i * -20 );
+				q.first();
+			}
+			for( int j = 0; j < 10; j++ ) assertEquals( h.dequeueInt(), q.dequeueInt() );
+		}
+
+		q = new IntArrayPriorityQueue( 200 );
+		h = new IntHeapPriorityQueue();
+		for( int i = 0; i < 200; i++ ) {
+			for( int j = 0; j < 20; j++ ) {
+				q.enqueue( j + i * 20 );
+				h.enqueue( j + i * 20 );
+			}
+			for( int j = 0; j < 10; j++ ) assertEquals( h.dequeueInt(), q.dequeueInt() );
+		}
+	}
+
+	@Test
+	public void testMixComp() {
+		IntArrayPriorityQueue q = new IntArrayPriorityQueue( IntComparators.OPPOSITE_COMPARATOR );
+		IntHeapPriorityQueue h = new IntHeapPriorityQueue( IntComparators.OPPOSITE_COMPARATOR );
+		for( int i = 0; i < 200; i++ ) {
+			for( int j = 0; j < 20; j++ ) {
+				q.enqueue( j + i * 20 );
+				h.enqueue( j + i * 20 );
+			}
+			for( int j = 0; j < 10; j++ ) assertEquals( h.dequeueInt(), q.dequeueInt() );
+		}
+		
+		q = new IntArrayPriorityQueue( 10, IntComparators.OPPOSITE_COMPARATOR );
+		h = new IntHeapPriorityQueue( IntComparators.OPPOSITE_COMPARATOR );
+		for( int i = 0; i < 200; i++ ) {
+			for( int j = 0; j < 20; j++ ) {
+				q.enqueue( j + i * -20 );
+				h.enqueue( j + i * -20 );
+				q.first();
+			}
+			for( int j = 0; j < 10; j++ ) assertEquals( h.dequeueInt(), q.dequeueInt() );
+		}
+
+		q = new IntArrayPriorityQueue( 200, IntComparators.OPPOSITE_COMPARATOR );
+		h = new IntHeapPriorityQueue( IntComparators.OPPOSITE_COMPARATOR );
+		for( int i = 0; i < 200; i++ ) {
+			for( int j = 0; j < 20; j++ ) {
+				q.enqueue( j + i * 20 );
+				h.enqueue( j + i * 20 );
+			}
+			for( int j = 0; j < 10; j++ ) assertEquals( h.dequeueInt(), q.dequeueInt() );
+		}
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/ints/IntArraySetTest.java b/test/it/unimi/dsi/fastutil/ints/IntArraySetTest.java
new file mode 100644
index 0000000..ff7fb11
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/IntArraySetTest.java
@@ -0,0 +1,80 @@
+package it.unimi.dsi.fastutil.ints;
+
+import it.unimi.dsi.fastutil.ints.IntArraySet;
+import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
+import it.unimi.dsi.fastutil.io.BinIO;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.ObjectOutputStream;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+public class IntArraySetTest {
+	
+	@Test
+	public void testSet() {
+		for( int i = 0; i <= 1; i++ ) {
+			final IntArraySet s = i == 0 ? new IntArraySet() : new IntArraySet( new int[ i ] );
+			assertTrue( s.add( 1 ) );
+			assertEquals( 1 + i, s.size() );
+			assertTrue( s.contains( 1 ) );
+			assertTrue( s.add(  2  ) );
+			assertTrue( s.contains( 2 ) );
+			assertEquals( 2 + i, s.size() );
+			assertFalse( s.add( 1 ) );
+			assertFalse( s.remove( 3 ) );
+			assertTrue( s.add( 3 ) );
+			assertEquals( 3 + i, s.size() );
+			assertTrue( s.contains( 1 ) );
+			assertTrue( s.contains( 2 ) );
+			assertTrue( s.contains( 2 ) );
+			assertEquals( new IntOpenHashSet( i == 0 ? new int[] { 1, 2, 3 } : new int[] { 0, 1, 2, 3 } ), new IntOpenHashSet( s.iterator() ) );
+			assertTrue( s.remove( 3 ) );
+			assertEquals( 2 + i, s.size() );
+			assertTrue( s.remove( 1 ) );
+			assertEquals( 1 + i, s.size() );
+			assertFalse( s.contains( 1 ) );
+			assertTrue( s.remove( 2 ) );
+			assertEquals( 0 + i, s.size() );
+			assertFalse( s.contains( 1 ) );
+		}
+	}
+	
+	@Test
+	public void testClone() {
+		IntArraySet s = new IntArraySet();
+		assertEquals( s, s.clone() );
+		s.add( 0 );
+		assertEquals( s, s.clone() );
+		s.add( 0 );
+		assertEquals( s, s.clone() );
+		s.add( 1 );
+		assertEquals( s, s.clone() );
+		s.add( 2 );
+		assertEquals( s, s.clone() );
+		s.remove( 0 );
+		assertEquals( s, s.clone() );
+	}
+
+	@Test
+	public void testSerialisation() throws IOException, ClassNotFoundException {
+		IntArraySet s = new IntArraySet();
+		ByteArrayOutputStream baos = new ByteArrayOutputStream();
+		ObjectOutputStream oos = new ObjectOutputStream( baos );
+		oos.writeObject( s );
+		oos.close();
+		assertEquals( s, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) );
+		
+		s.add( 0 );
+		s.add( 1 );
+
+		baos.reset();
+		oos = new ObjectOutputStream( baos );
+		oos.writeObject( s );
+		oos.close();
+		assertEquals( s, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) );
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/ints/IntArraysTest.java b/test/it/unimi/dsi/fastutil/ints/IntArraysTest.java
new file mode 100644
index 0000000..ebfe0d6
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/IntArraysTest.java
@@ -0,0 +1,591 @@
+package it.unimi.dsi.fastutil.ints;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.util.Arrays;
+import java.util.Random;
+
+import org.junit.Test;
+
+public class IntArraysTest {
+	
+	public static int[] identity( final int n ) {
+		final int[] perm = new int[ n ];
+		for( int i = perm.length; i-- != 0; ) perm[ i ] = i;
+		return perm;
+	}
+
+	@Test
+	public void testMergeSort() {
+		int[] a = { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }, b = a.clone(), sorted = a.clone();
+		Arrays.sort( sorted );
+		IntArrays.mergeSort( b );
+		assertArrayEquals( sorted, b );
+		IntArrays.mergeSort( b );
+		assertArrayEquals( sorted, b );
+		
+		final int[] d = a.clone();
+		IntArrays.mergeSort( d, new AbstractIntComparator() {
+			@Override
+			public int compare( int k1, int k2 ) {
+				return k1 - k2;
+			}
+		});
+		assertArrayEquals( sorted, d );
+
+		IntArrays.mergeSort( d, new AbstractIntComparator() {
+			@Override
+			public int compare( int k1, int k2 ) {
+				return k1 - k2;
+			}
+		});
+		assertArrayEquals( sorted, d );
+	}
+
+
+	@Test
+	public void testMergeSortSmallSupport() {
+		int[] a = { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		for( int to = 1; to < a.length; to++ )
+			for( int from = 0; from <= to; from++ )
+				IntArrays.mergeSort( a, from, to, new int[ to ] );
+	}
+	
+	@Test
+	public void testQuickSort() {
+		int[] a = { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 }, b = a.clone(), sorted = a.clone();
+		Arrays.sort( sorted );
+		IntArrays.quickSort( b );
+		assertArrayEquals( sorted, b );
+		IntArrays.quickSort( b );
+		assertArrayEquals( sorted, b );
+
+		final int[] d = a.clone();
+		IntArrays.quickSort( d, new AbstractIntComparator() {
+			@Override
+			public int compare( int k1, int k2 ) {
+				return k1 - k2;
+			}
+		});
+		assertArrayEquals( sorted, d );
+		IntArrays.quickSort( d, new AbstractIntComparator() {
+			@Override
+			public int compare( int k1, int k2 ) {
+				return k1 - k2;
+			}
+		});
+		assertArrayEquals( sorted, d );
+	}
+	
+	@Test
+	public void testShuffle() {
+		int[] a = new int[ 100 ];
+		for( int i = a.length; i-- != 0; ) a[ i ] = i;
+		IntArrays.shuffle( a, new Random() );
+		boolean[] b = new boolean[ a.length ];
+		for( int i = a.length; i-- != 0; ) {
+			assertFalse( b[ a[ i ] ] );
+			b[ a[ i ] ] = true;
+		}
+	}
+
+	@Test
+	public void testShuffleFragment() {
+		int[] a = new int[ 100 ];
+		for( int i = a.length; i-- != 0; ) a[ i ] = -1;
+		for( int i = 10; i < 30; i++ ) a[ i ] = i - 10;
+		IntArrays.shuffle( a, 10, 30, new Random() );
+		boolean[] b = new boolean[ 20 ];
+		for( int i = 20; i-- != 0; ) {
+			assertFalse( b[ a[ i + 10 ] ] );
+			b[ a[ i + 10 ] ] = true;
+		}
+	}
+
+	@Test
+	public void testRadixSort1() {
+		int[] t = { 2, 1, 0, 4 };
+		IntArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+		
+		t = new int[] { 2, -1, 0, -4 };
+		IntArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+		
+		t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) );
+		IntArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new int[ 100 ];
+		Random random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		IntArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new int[ 100000 ];
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		IntArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+		for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt();
+		IntArrays.radixSort( t, 10, 100 );
+		for( int i = 99; i-- != 10; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new int[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		IntArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+	}
+
+	@Test
+	public void testRadixSort2() {
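+		// radixSort( a, b ) sorts the pairs <a[ i ], b[ i ]> lexicographically while keeping the two
+		// arrays aligned, which is what the assertions below check: a must be nondecreasing, and b
+		// must be nondecreasing within each block of equal a values.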
+		int[][] d = new int[ 2 ][];
+
+		d[ 0 ] = new int[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3;
+		d[ 1 ] = IntArrays.shuffle( identity( 10 ), new Random( 0 ) );
+		IntArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+		
+		d[ 0 ] = new int[ 100000 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100;
+		d[ 1 ] = IntArrays.shuffle( identity( 100000 ), new Random( 6 ) );
+		IntArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new int[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = i % 3 - 2;
+		Random random = new Random( 0 );
+		d[ 1 ] = new int[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt();
+		IntArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+		
+		d[ 0 ] = new int[ 100000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt();
+		d[ 1 ] = new int[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt();
+		IntArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+		for( int i = 100; i-- != 10; ) d[ 0 ][ i ] = random.nextInt();
+		for( int i = 100; i-- != 10; ) d[ 1 ][ i ] = random.nextInt();
+		IntArrays.radixSort( d[ 0 ], d[ 1 ], 10, 100 );
+		for( int i = 99; i-- != 10; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new int[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt();
+		d[ 1 ] = new int[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt();
+		IntArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+	}
+
+	@Test
+	public void testRadixSort() {
+		int[][] t = { { 2, 1, 0, 4 } };
+		IntArrays.radixSort( t );
+		for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] );
+		
+		t[ 0 ] = IntArrays.shuffle( identity( 100 ), new Random( 0 ) );
+		IntArrays.radixSort( t );
+		for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] );
+
+		int[][] d = new int[ 2 ][];
+
+		d[ 0 ] = new int[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3;
+		d[ 1 ] = IntArrays.shuffle( identity( 10 ), new Random( 0 ) );
+		IntArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		
+		d[ 0 ] = new int[ 100000 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100;
+		d[ 1 ] = IntArrays.shuffle( identity( 100000 ), new Random( 6 ) );
+		IntArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new int[ 10 ];
+		Random random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt();
+		d[ 1 ] = new int[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt();
+		IntArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		
+		d[ 0 ] = new int[ 100000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt();
+		d[ 1 ] = new int[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt();
+		IntArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+		for( int i = 100; i-- != 10; ) d[ 0 ][ i ] = random.nextInt();
+		for( int i = 100; i-- != 10; ) d[ 1 ][ i ] = random.nextInt();
+		IntArrays.radixSort( d, 10, 100 );
+		for( int i = 99; i-- != 10; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new int[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextInt();
+		d[ 1 ] = new int[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextInt();
+		IntArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+	}
+
+	@Test
+	public void testRadixSortIndirectStable() {
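+		// Indirect sorting permutes perm so that t[ perm[ i ] ] becomes nondecreasing without moving t
+		// itself; with the stable flag set to true, positions holding equal values must keep their
+		// original relative order, i.e. perm must be increasing within each block of equal values.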
+		int[] t = { 2, 1, 0, 4 };
+		int[] perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+		
+		t = new int[ t.length ];
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, true );
+		for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+		
+		t = new int[] { 2, -1, 0, -4 };
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+		
+		t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) );
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+
+		t = new int[ 100 ];
+		perm = identity( t.length );
+		Random random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+
+		t = new int[ t.length ];
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, true );
+		for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+		
+		t = new int[ t.length ];
+		for( int i = 0; i < t.length; i++ ) t[ i ] = random.nextInt( 4 ); 
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, true );
+		for( int i = t.length - 1; i-- != 0; ) if ( t[ perm[ i ] ] == t[ perm[ i + 1 ] ] ) assertTrue( perm[ i ] < perm[ i + 1 ] );
+
+		t = new int[ 100 ];
+		perm = identity( t.length );
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, 10, 90, true );
+		for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+		for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] );
+		for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] );
+
+		t = new int[ 100000 ];
+		perm = identity( t.length );
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+
+		IntArrays.shuffle( perm, new Random( 0 ) );
+		IntArrays.radixSortIndirect( perm, t, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+
+		for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, 10, 100, true );
+		for( int i = 99; i-- != 10; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+
+		t = new int[ 10000000 ];
+		perm = identity( t.length );
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+
+		t = new int[ t.length ];
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, true );
+		for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+
+		t = new int[ t.length ];
+		for( int i = 0; i < t.length; i++ ) t[ i ] = random.nextInt( 8 ); 
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, true );
+		for( int i = t.length - 1; i-- != 0; ) if ( t[ perm[ i ] ] == t[ perm[ i + 1 ] ] ) assertTrue( perm[ i ] < perm[ i + 1 ] );
+	}
+	
+	@Test
+	public void testRadixSortIndirectUnstable() {
+		int[] t = { 2, 1, 0, 4 };
+		int[] perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+		
+		t = new int[ t.length ];
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, false );
+		for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+		
+		t = new int[] { 2, -1, 0, -4 };
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+		
+		t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) );
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+
+		t = new int[ 100 ];
+		perm = identity( t.length );
+		Random random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+
+		t = new int[ 100 ];
+		perm = identity( t.length );
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, 10, 90, false );
+		for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+		for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] );
+		for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] );
+
+		t = new int[ 100000 ];
+		perm = identity( t.length );
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+		for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, 10, 100, false );
+		for( int i = 99; i-- != 10; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+
+		IntArrays.shuffle( perm, new Random( 0 ) );
+		IntArrays.radixSortIndirect( perm, t, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( Integer.toString( i ), t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+
+		t = new int[ 10000000 ];
+		perm = identity( t.length );
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+
+		t = new int[ t.length ];
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, false );
+		for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+	}
+	
+	@Test
+	public void testRadixSort2IndirectStable() {
+		int[] t = { 2, 1, 0, 4 };
+		int[] u = { 3, 2, 1, 0 };
+		int[] perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+		
+		t = new int[ t.length ];
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+		
+		t = new int[ t.length ];
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, t, true );
+		for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+		
+		t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) );
+		u = IntArrays.shuffle( identity( 100 ), new Random( 1 ) );
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		t = new int[ 100 ];
+		u = new int[ 100 ];
+		perm = identity( t.length );
+		Random random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		for( int i = t.length; i-- != 0; ) u[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		t = new int[ t.length ];
+		u = new int[ t.length ];
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+		
+		for( int i = 0; i < u.length; i++ ) t[ i ] = random.nextInt( 4 ); 
+		for( int i = 0; i < u.length; i++ ) u[ i ] = random.nextInt( 4 ); 
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) if ( t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] == u[ perm[ i + 1 ] ] ) assertTrue( perm[ i ] < perm[ i + 1 ] );
+
+		t = new int[ 100 ];
+		u = new int[ 100 ];
+		perm = identity( t.length );
+		random = new Random( 0 );
+		for( int i = u.length; i-- != 0; ) u[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, u, 10, 90, true );
+		for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+		for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] );
+		for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] );
+
+		t = new int[ 100000 ];
+		u = new int[ 100000 ];
+		perm = identity( t.length );
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		IntArrays.shuffle( perm, new Random( 0 ) );
+		IntArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, u, 10, 100, true );
+		for( int i = 99; i-- != 10; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		t = new int[ 10000000 ];
+		u = new int[ 10000000 ];
+		perm = identity( t.length );
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		for( int i = t.length; i-- != 0; ) u[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		t = new int[ t.length ];
+		u = new int[ t.length ];
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+
+		t = new int[ t.length ];
+		for( int i = 0; i < t.length; i++ ) t[ i ] = random.nextInt( 8 ); 
+		for( int i = 0; i < t.length; i++ ) u[ i ] = random.nextInt( 8 ); 
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, u, true );
+		for( int i = t.length - 1; i-- != 0; ) if ( t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] == u[ perm[ i + 1 ] ] ) assertTrue( perm[ i ] < perm[ i + 1 ] );
+	}
+	
+	@Test
+	public void testRadixSort2IndirectUnstable() {
+		int[] t = { 2, 1, 0, 4 };
+		int[] u = { 3, 2, 1, 0 };
+		int[] perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] <= t[ perm[ i + 1 ] ] );
+		
+		t = new int[ t.length ];
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+		
+		t = new int[ t.length ];
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, t, false );
+		for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+		
+		t = IntArrays.shuffle( identity( 100 ), new Random( 0 ) );
+		u = IntArrays.shuffle( identity( 100 ), new Random( 1 ) );
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		t = new int[ 100 ];
+		u = new int[ 100 ];
+		perm = identity( t.length );
+		Random random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		for( int i = t.length; i-- != 0; ) u[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		t = new int[ t.length ];
+		u = new int[ t.length ];
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+		
+		for( int i = 0; i < u.length; i++ ) t[ i ] = random.nextInt( 4 ); 
+		for( int i = 0; i < u.length; i++ ) u[ i ] = random.nextInt( 4 ); 
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		t = new int[ 100 ];
+		u = new int[ 100 ];
+		perm = identity( t.length );
+		random = new Random( 0 );
+		for( int i = u.length; i-- != 0; ) u[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, u, 10, 90, false );
+		for( int i = 10; i < 89; i++ ) assertTrue( Integer.toString( i ), u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+		for( int i = 0; i < 10; i++ ) assertEquals( i, perm[ i ] );
+		for( int i = 90; i < 100; i++ ) assertEquals( i, perm[ i ] );
+
+		t = new int[ 100000 ];
+		u = new int[ 100000 ];
+		perm = identity( t.length );
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		IntArrays.shuffle( perm, new Random( 0 ) );
+		IntArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		for( int i = 100; i-- != 10; ) t[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, u, 10, 100, false );
+		for( int i = 99; i-- != 10; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		t = new int[ 10000000 ];
+		u = new int[ 10000000 ];
+		perm = identity( t.length );
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextInt();
+		for( int i = t.length; i-- != 0; ) u[ i ] = random.nextInt();
+		IntArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+
+		t = new int[ t.length ];
+		u = new int[ t.length ];
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertEquals( i, perm[ i ] );
+
+		t = new int[ t.length ];
+		for( int i = 0; i < t.length; i++ ) t[ i ] = random.nextInt( 8 ); 
+		for( int i = 0; i < t.length; i++ ) u[ i ] = random.nextInt( 8 ); 
+		perm = identity( t.length );
+		IntArrays.radixSortIndirect( perm, t, u, false );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( i + " " + t[ perm[ i ] ] + " " + t[ perm[ i + 1 ] ] + " " + u[ perm[ i ] ] + " " + u[ perm[ i + 1 ] ] + "  " + perm[ i ] + " " + perm[ i + 1 ], t[ perm[ i ] ] < t[ perm[ i + 1 ] ] || t[ perm[ i ] ] == t[ perm[ i + 1 ] ] && u[ perm[ i ] ] <= u[ perm[ i + 1 ] ] );
+	}
+	
+	@Test
+	public void testBinarySearchLargeKey() {
+		final int[] a = { 1, 2, 3 };
+		IntArrays.binarySearch( a, 4 );
+	}
+	
+	@Test
+	public void testReverse() {
+		assertArrayEquals( new int[] { 0, 1, 2, 3 }, IntArrays.reverse( new int[] { 3, 2, 1, 0 } ) );
+		assertArrayEquals( new int[] { 0, 1, 2, 3, 4 }, IntArrays.reverse( new int[] { 4, 3, 2, 1, 0 } ) );
+	}
+}
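
For reference (not part of the patch itself), the indirect radix sort exercised by the test above sorts a permutation array rather than the keys: the key array stays untouched and the permutation ends up enumerating indices in nondecreasing key order. A minimal usage sketch, using only calls that appear in the test (array contents are illustrative):

    int[] keys = { 3, 1, 2, 1 };
    int[] perm = { 0, 1, 2, 3 };                       // identity permutation
    IntArrays.radixSortIndirect( perm, keys, true );   // stable: equal keys keep their original order
    // keys is unchanged; now keys[ perm[ 0 ] ] <= keys[ perm[ 1 ] ] <= keys[ perm[ 2 ] ] <= keys[ perm[ 3 ] ]
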
diff --git a/test/it/unimi/dsi/fastutil/ints/IntBigArrayBigListTest.java b/test/it/unimi/dsi/fastutil/ints/IntBigArrayBigListTest.java
new file mode 100644
index 0000000..2846aac
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/IntBigArrayBigListTest.java
@@ -0,0 +1,489 @@
+package it.unimi.dsi.fastutil.ints;
+
+import static org.junit.Assert.assertTrue;
+
+import java.util.Iterator;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+@SuppressWarnings("rawtypes")
+public class IntBigArrayBigListTest {
+
+	private static java.util.Random r = new java.util.Random( 0 );
+
+	private static int genKey() {
+		return r.nextInt();
+	}
+
+	private static Object[] k, nk;
+
+	private static int kt[];
+
+	private static int nkt[];
+
+	@SuppressWarnings("unchecked")
+	protected static void testLists( IntBigList m, IntBigList t, int n, int level ) {
+		Exception mThrowsOutOfBounds, tThrowsOutOfBounds;
+		Object rt = null;
+		int rm = ( 0 );
+		if ( level > 4 ) return;
+		/* Now we check that both sets agree on random keys. For m we use the polymorphic method. */
+		for ( int i = 0; i < n; i++ ) {
+			int p = r.nextInt() % ( n * 2 );
+			int T = genKey();
+			mThrowsOutOfBounds = tThrowsOutOfBounds = null;
+			try {
+				m.set( p, T );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				t.set( p, ( Integer.valueOf( T ) ) );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			
+			if ( mThrowsOutOfBounds == null ) 
+			p = r.nextInt() % ( n * 2 );
+			mThrowsOutOfBounds = tThrowsOutOfBounds = null;
+			try {
+				m.getInt( p );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				t.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			assertTrue( "Error (" + level + "): get() divergence at start in IndexOutOfBoundsException for index " + p + "  (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ,  ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			if ( mThrowsOutOfBounds == null ) assertTrue( "Error (" + level + "): m and t differ after get() on position " + p + " (" + m.getInt( p ) + ", " + t.get( p ) + ")" ,  t.get( p ).equals( ( Integer.valueOf( m.getInt( p ) ) ) ) );
+		}
+		/* Now we check that both sets agree on random keys. For m we use the standard method. */
+		for ( int i = 0; i < n; i++ ) {
+			int p = r.nextInt() % ( n * 2 );
+			mThrowsOutOfBounds = tThrowsOutOfBounds = null;
+			try {
+				m.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				t.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			assertTrue( "Error (" + level + "): get() divergence at start in IndexOutOfBoundsException for index " + p+ "  (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ,  ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			if ( mThrowsOutOfBounds == null ) assertTrue( "Error (" + level + "): m and t differ at start on position " + p + " (" + m.get( p ) + ", " + t.get( p ) + ")" ,  t.get( p ).equals( m.get( p ) ) );
+		}
+		/* Now we check that m and t are equal. */
+		if ( !m.equals( t ) || !t.equals( m ) ) System.err.println( "m: " + m + " t: " + t );
+		assertTrue( "Error (" + level + "): ! m.equals( t ) at start" ,  m.equals( t ) );
+		assertTrue( "Error (" + level + "): ! t.equals( m ) at start" ,  t.equals( m ) );
+		/* Now we check that m actually holds that data. */
+		for ( Iterator i = t.iterator(); i.hasNext(); ) {
+			assertTrue( "Error (" + level + "): m and t differ on an entry after insertion (iterating on t)" ,  m.contains( i.next() ) );
+		}
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for ( Iterator i = m.listIterator(); i.hasNext(); ) {
+			assertTrue( "Error (" + level + "): m and t differ on an entry after insertion (iterating on m)" ,  t.contains( i.next() ) );
+		}
+		/*
+		 * Now we check that inquiries about random data give the same answer in m and t. For m we
+		 * use the polymorphic method.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			int T = genKey();
+			assertTrue( "Error (" + level + "): divergence in content between t and m (polymorphic method)" ,  m.contains( T ) == t.contains( ( Integer.valueOf( T ) ) ) );
+		}
+		/*
+		 * Again, we check that inquiries about random data give the same answer in m and t, but for
+		 * m we use the standard method.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			int T = genKey();
+			assertTrue( "Error (" + level + "): divergence in content between t and m (standard method)" ,  m.contains( ( Integer.valueOf( T ) ) ) == t.contains( ( Integer.valueOf( T ) ) ) );
+		}
+		/* Now we add and remove random data in m and t, checking that the result is the same. */
+		for ( int i = 0; i < 2 * n; i++ ) {
+			int T = genKey();
+			try {
+				m.add( T );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				t.add( ( Integer.valueOf( T ) ) );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			T = genKey();
+			int p = r.nextInt() % ( 2 * n + 1 );
+			mThrowsOutOfBounds = tThrowsOutOfBounds = null;
+			try {
+				m.add( p, T );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				t.add( p, ( Integer.valueOf( T ) ) );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			assertTrue( "Error (" + level + "): add() divergence in IndexOutOfBoundsException for index " + p + " for " + T+ " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ,  ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			p = r.nextInt() % ( 2 * n + 1 );
+			mThrowsOutOfBounds = tThrowsOutOfBounds = null;
+			try {
+				rm = m.removeInt( p );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				rt = t.remove( p );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			assertTrue( "Error (" + level + "): remove() divergence in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ,  ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			if ( mThrowsOutOfBounds == null ) assertTrue( "Error (" + level + "): divergence in remove() between t and m (" + rt + ", " + rm + ")" ,  rt.equals( ( Integer.valueOf( rm ) ) ) );
+		}
+		assertTrue( "Error (" + level + "): ! m.equals( t ) after add/remove" ,  m.equals( t ) );
+		assertTrue( "Error (" + level + "): ! t.equals( m ) after add/remove" ,  t.equals( m ) );
+		/*
+		 * Now we add random data in m and t using addAll on a collection, checking that the result
+		 * is the same.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			int p = r.nextInt() % ( 2 * n + 1 );
+			java.util.Collection m1 = new java.util.ArrayList();
+			int s = r.nextInt( n / 2 + 1 );
+			for ( int j = 0; j < s; j++ )
+				m1.add( ( Integer.valueOf( genKey() ) ) );
+			mThrowsOutOfBounds = tThrowsOutOfBounds = null;
+			try {
+				m.addAll( p, m1 );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				t.addAll( p, m1 );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			assertTrue( "Error (" + level + "): addAll() divergence in IndexOutOfBoundsException for index " + p + " for "+ m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ,  ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			assertTrue( "Error (" + level + "," + m + t + "): ! m.equals( t ) after addAll" ,  m.equals( t ) );
+			assertTrue( "Error (" + level + "," + m + t + "): ! t.equals( m ) after addAll" ,  t.equals( m ) );
+		}
+		if ( m.size64() > n ) {
+			m.size( n );
+			while ( t.size() != n )
+				t.remove( t.size() - 1 );
+		}
+		/*
+		 * Now we add random data in m and t using addAll on a type-specific collection, checking
+		 * that the result is the same.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			int p = r.nextInt() % ( 2 * n + 1 );
+			IntCollection m1 = new IntBigArrayBigList();
+			java.util.Collection t1 = new java.util.ArrayList();
+			int s = r.nextInt( n / 2 + 1 );
+			for ( int j = 0; j < s; j++ ) {
+				int x = genKey();
+				m1.add( x );
+				t1.add( ( Integer.valueOf( x ) ) );
+			}
+			mThrowsOutOfBounds = tThrowsOutOfBounds = null;
+			try {
+				m.addAll( p, m1 );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				t.addAll( p, t1 );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			assertTrue( "Error (" + level + "): polymorphic addAll() divergence in IndexOutOfBoundsException for index "+ p + " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ,  ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			assertTrue( "Error (" + level + "," + m + t + "): ! m.equals( t ) after polymorphic addAll" ,  m.equals( t ) );
+			assertTrue( "Error (" + level + "," + m + t + "): ! t.equals( m ) after polymorphic addAll" ,  t.equals( m ) );
+		}
+		if ( m.size64() > n ) {
+			m.size( n );
+			while ( t.size() != n )
+				t.remove( t.size() - 1 );
+		}
+		/*
+		 * Now we add random data in m and t using addAll on a list, checking that the result is the
+		 * same.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			int p = r.nextInt() % ( 2 * n + 1 );
+			IntBigList m1 = new IntBigArrayBigList();
+			java.util.Collection t1 = new java.util.ArrayList();
+			int s = r.nextInt( n / 2 + 1 );
+			for ( int j = 0; j < s; j++ ) {
+				int x = genKey();
+				m1.add( x );
+				t1.add( ( Integer.valueOf( x ) ) );
+			}
+			mThrowsOutOfBounds = tThrowsOutOfBounds = null;
+			try {
+				m.addAll( p, m1 );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				t.addAll( p, t1 );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			assertTrue( "Error (" + level + "): list addAll() divergence in IndexOutOfBoundsException for index " + p+ " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ,  ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			assertTrue( "Error (" + level + "): ! m.equals( t ) after list addAll" ,  m.equals( t ) );
+			assertTrue( "Error (" + level + "): ! t.equals( m ) after list addAll" ,  t.equals( m ) );
+		}
+		/* Now we check that both sets agree on random keys. For m we use the standard method. */
+		for ( int i = 0; i < n; i++ ) {
+			int p = r.nextInt() % ( n * 2 );
+			mThrowsOutOfBounds = tThrowsOutOfBounds = null;
+			try {
+				m.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				t.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			assertTrue( "Error (" + level + "): get() divergence in IndexOutOfBoundsException for index " + p + "  ("	+ mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")" ,  ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			if ( mThrowsOutOfBounds == null ) assertTrue( "Error (" + level + "): m and t differ on position " + p + " (" + m.get( p ) + ", " + t.get( p )	+ ")" ,  t.get( p ).equals( m.get( p ) ) );
+		}
+		/* Now we inquire about the content with indexOf()/lastIndexOf(). */
+		for ( int i = 0; i < 10 * n; i++ ) {
+			int T = genKey();
+			assertTrue( "Error (" + level + "): indexOf() divergence for " + T + "  (" + m.indexOf( ( Integer.valueOf( T ) ) ) + ", " + t.indexOf( ( Integer.valueOf( T ) ) ) + ")", m.indexOf( ( Integer.valueOf( T ) ) ) == t.indexOf( ( Integer.valueOf( T ) ) ) );
+			assertTrue( "Error (" + level + "): lastIndexOf() divergence for " + T + "  (" + m.lastIndexOf( ( Integer.valueOf( T ) ) ) + ", " + t.lastIndexOf( ( Integer.valueOf( T ) ) )	+ ")", m.lastIndexOf( ( Integer.valueOf( T ) ) ) == t.lastIndexOf( ( Integer.valueOf( T ) ) ) );
+			assertTrue( "Error (" + level + "): polymorphic indexOf() divergence for " + T + "  (" + m.indexOf( T ) + ", " + t.indexOf( ( Integer.valueOf( T ) ) ) + ")" ,  m.indexOf( T ) == t.indexOf( ( Integer.valueOf( T ) ) ) );
+			assertTrue( "Error (" + level + "): polymorphic lastIndexOf() divergence for " + T + "  (" + m.lastIndexOf( T ) + ", " + t.lastIndexOf( ( Integer.valueOf( T ) ) ) + ")" ,  m.lastIndexOf( T ) == t.lastIndexOf( ( Integer.valueOf( T ) ) ) );
+		}
+		/* Now we check cloning. */
+		if ( level == 0 ) {
+			assertTrue( "Error (" + level + "): m does not equal m.clone()" ,  m.equals( ( (IntBigArrayBigList)m ).clone() ) );
+			assertTrue( "Error (" + level + "): m.clone() does not equal m" ,  ( (IntBigArrayBigList)m ).clone().equals( m ) );
+		}
+		/* Now we play with constructors. */
+		assertTrue( "Error (" + level + "): m does not equal new ( type-specific Collection m )" ,  m.equals( new IntBigArrayBigList( (IntCollection)m ) ) );
+		assertTrue( "Error (" + level + "): new ( type-specific Collection m ) does not equal m" ,  ( new IntBigArrayBigList( (IntCollection)m ) ).equals( m ) );
+		assertTrue( "Error (" + level + "): m does not equal new ( type-specific List m )" ,  m.equals( new IntBigArrayBigList( m ) ) );
+		assertTrue( "Error (" + level + "): new ( type-specific List m ) does not equal m" ,  ( new IntBigArrayBigList( m ) ).equals( m ) );
+		assertTrue( "Error (" + level + "): m does not equal new ( m.listIterator() )" ,  m.equals( new IntBigArrayBigList( m.listIterator() ) ) );
+		assertTrue( "Error (" + level + "): new ( m.listIterator() ) does not equal m" ,  ( new IntBigArrayBigList( m.listIterator() ) ).equals( m ) );
+		assertTrue( "Error (" + level + "): m does not equal new ( m.type_specific_iterator() )" ,  m.equals( new IntBigArrayBigList( m.iterator() ) ) );
+		assertTrue( "Error (" + level + "): new ( m.type_specific_iterator() ) does not equal m" ,  ( new IntBigArrayBigList( m.iterator() ) ).equals( m ) );
+		int h = m.hashCode();
+		/* Now we save and read m. */
+		IntBigList m2 = null;
+		try {
+			java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" );
+			java.io.OutputStream os = new java.io.FileOutputStream( ff );
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os );
+			oos.writeObject( m );
+			oos.close();
+			java.io.InputStream is = new java.io.FileInputStream( ff );
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is );
+			m2 = (IntBigList)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch ( Exception e ) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+		assertTrue( "Error (" + level + "): hashCode() changed after save/read" ,  m2.hashCode() == h );
+		/* Now we check that m2 actually holds that data. */
+		assertTrue( "Error (" + level + "): ! m2.equals( t ) after save/read" ,  m2.equals( t ) );
+		assertTrue( "Error (" + level + "): ! t.equals( m2 ) after save/read" ,  t.equals( m2 ) );
+		/* Now we take out of m everything, and check that it is empty. */
+		for ( Iterator i = t.iterator(); i.hasNext(); )
+			m2.remove( i.next() );
+		assertTrue( "Error (" + level + "): m2 is not empty (as it should be)" ,  m2.isEmpty() );
+		/* Now we play with iterators. */
+		{
+			IntBigListIterator i;
+			IntBigListIterator j;
+			i = m.listIterator();
+			j = t.listIterator();
+			for ( int k = 0; k < 2 * n; k++ ) {
+				assertTrue( "Error (" + level + "): divergence in hasNext()" ,  i.hasNext() == j.hasNext() );
+				assertTrue( "Error (" + level + "): divergence in hasPrevious()" ,  i.hasPrevious() == j.hasPrevious() );
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					assertTrue( "Error (" + level + "): divergence in next()" ,  i.next().equals( j.next() ) );
+					if ( r.nextFloat() < 0.2 ) {
+						i.remove();
+						j.remove();
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						int T = genKey();
+						i.set( T );
+						j.set( ( Integer.valueOf( T ) ) );
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						int T = genKey();
+						i.add( T );
+						j.add( ( Integer.valueOf( T ) ) );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					assertTrue( "Error (" + level + "): divergence in previous()" ,  i.previous().equals( j.previous() ) );
+					if ( r.nextFloat() < 0.2 ) {
+						i.remove();
+						j.remove();
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						int T = genKey();
+						i.set( T );
+						j.set( ( Integer.valueOf( T ) ) );
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						int T = genKey();
+						i.add( T );
+						j.add( ( Integer.valueOf( T ) ) );
+					}
+				}
+				assertTrue( "Error (" + level + "): divergence in nextIndex()" ,  i.nextIndex() == j.nextIndex() );
+				assertTrue( "Error (" + level + "): divergence in previousIndex()" ,  i.previousIndex() == j.previousIndex() );
+			}
+		}
+		{
+			Object I, J;
+			int from = r.nextInt( m.size() + 1 );
+			IntBigListIterator i;
+			IntBigListIterator j;
+			i = m.listIterator( from );
+			j = t.listIterator( from );
+			for ( int k = 0; k < 2 * n; k++ ) {
+				assertTrue( "Error (" + level + "): divergence in hasNext() (iterator with starting point " + from + ")" ,  i.hasNext() == j.hasNext() );
+				assertTrue( "Error (" + level + "): divergence in hasPrevious() (iterator with starting point " + from + ")" ,  i.hasPrevious() == j.hasPrevious() );
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					I = i.next();
+					J = j.next();
+					assertTrue( "Error (" + level + "): divergence in next() (" + I + ", " + J + ", iterator with starting point " + from + ")" ,  I.equals( J ) );
+					// System.err.println("Done next " + I + " " + J + "  " + badPrevious);
+					if ( r.nextFloat() < 0.2 ) {
+						// System.err.println("Removing in next");
+						i.remove();
+						j.remove();
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						int T = genKey();
+						i.set( T );
+						j.set( ( Integer.valueOf( T ) ) );
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						int T = genKey();
+						i.add( T );
+						j.add( ( Integer.valueOf( T ) ) );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					I = i.previous();
+					J = j.previous();
+					assertTrue( "Error (" + level + "): divergence in previous() (" + I + ", " + J + ", iterator with starting point "	+ from + ")" ,  I.equals( J ) );
+					if ( r.nextFloat() < 0.2 ) {
+						// System.err.println("Removing in prev");
+						i.remove();
+						j.remove();
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						int T = genKey();
+						i.set( T );
+						j.set( ( Integer.valueOf( T ) ) );
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						int T = genKey();
+						i.add( T );
+						j.add( ( Integer.valueOf( T ) ) );
+					}
+				}
+			}
+		}
+		/* Now we check that m actually holds that data. */
+		assertTrue( "Error (" + level + "): ! m.equals( t ) after iteration" ,  m.equals( t ) );
+		assertTrue( "Error (" + level + "): ! t.equals( m ) after iteration" ,  t.equals( m ) );
+		/* Now we select a pair of keys and create a subset. */
+		if ( !m.isEmpty() ) {
+			int start = r.nextInt( m.size() );
+			int end = start + r.nextInt( m.size() - start );
+			// System.err.println("Checking subList from " + start + " to " + end + " (level=" +
+			// (level+1) + ")..." );
+			testLists( m.subList( start, end ), t.subList( start, end ), n, level + 1 );
+			assertTrue( "Error (" + level + "," + m + t + "): ! m.equals( t ) after subList" ,  m.equals( t ) );
+			assertTrue( "Error (" + level + "): ! t.equals( m ) after subList" ,  t.equals( m ) );
+		}
+		m.clear();
+		t.clear();
+		assertTrue( "Error (" + level + "): m is not empty after clear()" ,  m.isEmpty() );
+	}
+
+	protected static void test( int n ) {
+		IntBigArrayBigList m = new IntBigArrayBigList();
+		IntBigList t = IntBigLists.asBigList( new IntArrayList() );
+		k = new Object[ n ];
+		nk = new Object[ n ];
+		kt = new int[ n ];
+		nkt = new int[ n ];
+		for ( int i = 0; i < n; i++ ) {
+			k[ i ] = new Integer( kt[ i ] = genKey() );
+			nk[ i ] = new Integer( nkt[ i ] = genKey() );
+		}
+		/* We add pairs to t. */
+		for ( int i = 0; i < n; i++ )
+			t.add( (Integer)k[ i ] );
+		/* We add to m the same data */
+		m.addAll( t );
+		testLists( m, t, n, 0 );
+		return;
+	}
+
+	@Test
+	public void test1() {
+		test( 1 );
+	}
+
+	@Test
+	public void test10() {
+		test( 10 );
+	}
+
+	@Test
+	public void test100() {
+		test( 100 );
+	}
+
+	@Ignore("Too long")
+	@Test
+	public void test1000() {
+		test( 1000 );
+	}
+}
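
For reference, the big list under test is backed by a big array and exposes 64-bit sizes through size64(). A minimal usage sketch based only on methods exercised above:

    IntBigArrayBigList l = new IntBigArrayBigList();
    l.add( 42 );                 // append
    l.add( 0, 7 );               // positional add (indices are longs)
    long size = l.size64();      // 2
    int first = l.getInt( 0 );   // 7
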
diff --git a/test/it/unimi/dsi/fastutil/ints/IntBigArraysTest.java b/test/it/unimi/dsi/fastutil/ints/IntBigArraysTest.java
new file mode 100644
index 0000000..d4b6ee4
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/IntBigArraysTest.java
@@ -0,0 +1,232 @@
+package it.unimi.dsi.fastutil.ints;
+
+import static org.junit.Assert.*;
+import static it.unimi.dsi.fastutil.ints.IntBigArrays.set;
+import static it.unimi.dsi.fastutil.ints.IntBigArrays.get;
+
+import java.util.Arrays;
+import java.util.Random;
+
+import org.junit.Test;
+
+public class IntBigArraysTest {
+
+	
+	public static int[][] identity( final int n ) {
+		final int[][] perm = IntBigArrays.newBigArray( n );
+		for( int i = n; i-- != 0; ) IntBigArrays.set( perm, i , i );
+		return perm;
+	}
+
+	@Test
+	public void testQuickSort() {
+		int[] s = new int[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		
+		Arrays.sort( s );
+		int[][] sorted = IntBigArrays.wrap( s.clone() );
+
+		int[][] a = IntBigArrays.wrap( s.clone()  );
+
+		IntBigArrays.quickSort( a );
+		assertArrayEquals( sorted, a );
+
+		IntBigArrays.quickSort( a );
+		assertArrayEquals( sorted, a );
+		
+		a = IntBigArrays.wrap( s.clone()  );
+		
+		IntBigArrays.quickSort( a, IntComparators.NATURAL_COMPARATOR );
+		assertArrayEquals( sorted, a );
+
+		IntBigArrays.quickSort( a, IntComparators.NATURAL_COMPARATOR );
+		assertArrayEquals( sorted, a );
+		
+	}
+
+	private void testCopy( int n ) {
+		int[][] a = IntBigArrays.newBigArray( n );
+		for ( int i = 0; i < n; i++ ) set( a, i, i );
+		IntBigArrays.copy( a, 0, a, 1, n - 2 );
+		assertEquals( 0, a[ 0 ][ 0 ] );
+		for ( int i = 0; i < n - 2; i++ ) assertEquals( i,  get( a, i + 1 ) );
+		for ( int i = 0; i < n; i++ ) set( a, i, i );
+		IntBigArrays.copy( a, 1, a, 0, n - 1 );
+		for ( int i = 0; i < n - 1; i++ ) assertEquals( i + 1, get( a, i ) );
+		for ( int i = 0; i < n; i++ ) set( a, i, i );
+		int[] b = new int[ n ];
+		for ( int i = 0; i < n; i++ ) b[ i ] = i;
+		assertArrayEquals( a, IntBigArrays.wrap( b ) );
+	}
+	
+	@Test
+	public void testCopy10() {
+		testCopy( 10 );
+	}
+
+	@Test
+	public void testCopy1000() {
+		testCopy( 1000 );
+	}
+
+	@Test
+	public void testCopy1000000() {
+		testCopy( 1000000 );
+	}
+
+	@Test
+	public void testBinarySearch() {
+		int[] a = new int[] { 25, 32, 1, 3, 2, 0, 40, 7, 13, 12, 11, 10, -1, -6, -18, 2000 };
+		
+		Arrays.sort( a );
+		int[][] b = IntBigArrays.wrap( a.clone() );
+
+		for( int i = -1; i < 20; i++ ) {
+			assertEquals( "" + i, Arrays.binarySearch( a, i ), IntBigArrays.binarySearch( b, i ) );
+			assertEquals( "" + i, Arrays.binarySearch( a, i ), IntBigArrays.binarySearch( b, i, IntComparators.NATURAL_COMPARATOR ) );
+		}
+	
+		for( int i = -1; i < 20; i++ ) {
+			assertEquals( Arrays.binarySearch( a, 5, 13, i ), IntBigArrays.binarySearch( b, 5, 13, i ) );
+			assertEquals( Arrays.binarySearch( a, 5, 13, i ), IntBigArrays.binarySearch( b, 5, 13, i, IntComparators.NATURAL_COMPARATOR ) );
+		}
+	}
+
+	@Test
+	public void testTrim() {
+		int[] a = new int[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		int[][] b = IntBigArrays.wrap( a.clone() );
+
+		for( int i = a.length; i-- != 0; ) {
+			int[][] t = IntBigArrays.trim( b, i );
+			final long l = IntBigArrays.length( t );
+			assertEquals( i, l );
+			for( int p = 0; p < l; p++ ) assertEquals( a[ p ], IntBigArrays.get( t, p ) );
+			
+		}
+	}
+
+	@Test
+	public void testEquals() {
+		int[] a = new int[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		int[][] b = IntBigArrays.wrap( a.clone() );
+		int[][] c = IntBigArrays.wrap( a.clone() );
+
+		assertTrue( IntBigArrays.equals( b, c ) );
+		b[ 0 ][ 0 ] = 0;
+		assertFalse( IntBigArrays.equals( b, c ) );
+	}
+
+	@Test
+	public void testRadixSort1() {
+		int[][] t = IntBigArrays.wrap( new int[] { 2, 1, 0, 4 } );
+		IntBigArrays.radixSort( t );
+		for( long i = IntBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( IntBigArrays.get( t, i ) <= IntBigArrays.get( t, i + 1 ) );
+		
+		t = IntBigArrays.wrap( new int[] { 2, -1, 0, -4 } );
+		IntBigArrays.radixSort( t );
+		for( long i = IntBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( IntBigArrays.get( t, i ) <= IntBigArrays.get( t, i + 1 ) );
+		
+		t = IntBigArrays.shuffle( identity( 100 ), new Random( 0 ) );
+		IntBigArrays.radixSort( t );
+		for( long i = IntBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( IntBigArrays.get( t, i ) <= IntBigArrays.get( t, i + 1 ) );
+
+		t = IntBigArrays.newBigArray( 100 );
+		Random random = new Random( 0 );
+		for( long i = IntBigArrays.length( t ); i-- != 0; ) IntBigArrays.set( t, i, random.nextInt() );
+		IntBigArrays.radixSort( t );
+		for( long i = IntBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( IntBigArrays.get( t, i ) <= IntBigArrays.get( t, i + 1 ) );
+
+		t = IntBigArrays.newBigArray( 100000 );
+		random = new Random( 0 );
+		for( long i = IntBigArrays.length( t ); i-- != 0; ) IntBigArrays.set( t, i, random.nextInt() );
+		IntBigArrays.radixSort( t );
+		for( long i = IntBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( IntBigArrays.get( t, i ) <= IntBigArrays.get( t, i + 1 ) );
+		for( long i = 100; i-- != 10; ) IntBigArrays.set( t, i, random.nextInt() );
+		IntBigArrays.radixSort( t, 10, 100 );
+		for( long i = 99; i-- != 10; ) assertTrue( IntBigArrays.get( t, i ) <= IntBigArrays.get( t, i + 1 ) );
+
+		t = IntBigArrays.newBigArray( 1000000 );
+		random = new Random( 0 );
+		for( long i = IntBigArrays.length( t ); i-- != 0; ) IntBigArrays.set( t, i, random.nextInt() );
+		IntBigArrays.radixSort( t );
+		for( long i = IntBigArrays.length( t ) - 1; i-- != 0; ) assertTrue( IntBigArrays.get( t, i ) <= IntBigArrays.get( t, i + 1 ) );
+	}
+
+	@Test
+	public void testRadixSort2() {
+		int d[][], e[][];
+		d = IntBigArrays.newBigArray( 10 );
+		for( long i = IntBigArrays.length( d ); i-- != 0; ) IntBigArrays.set( d, i, (int)( 3 - i % 3 ) );
+		e = IntBigArrays.shuffle( identity( 10 ), new Random( 0 ) );
+		IntBigArrays.radixSort( d, e );
+		for( long i = IntBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + IntBigArrays.get( d, i ) + ", " + IntBigArrays.get( e, i ) + ">, <" + IntBigArrays.get( d, i + 1 ) + ", " +  IntBigArrays.get( e, i + 1 ) + ">", IntBigArrays.get( d, i ) < IntBigArrays.get( d, i + 1 ) || IntBigArrays.get( d, i ) == IntBigArrays.get( d, i + 1 ) && IntBigArrays.get( e, i ) <= IntBigArrays.get( e, i + 1 ) );
+		
+		d = IntBigArrays.newBigArray( 100000 );
+		for( long i = IntBigArrays.length( d ); i-- != 0; ) IntBigArrays.set( d, i, (int)( 100 - i % 100 ) );
+		e = IntBigArrays.shuffle( identity( 100000 ), new Random( 6 ) );
+		IntBigArrays.radixSort( d, e );
+		for( long i = IntBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + IntBigArrays.get( d, i ) + ", " + IntBigArrays.get( e, i ) + ">, <" + IntBigArrays.get( d, i + 1 ) + ", " +  IntBigArrays.get( e, i + 1 ) + ">", IntBigArrays.get( d, i ) < IntBigArrays.get( d, i + 1 ) || IntBigArrays.get( d, i ) == IntBigArrays.get( d, i + 1 ) && IntBigArrays.get( e, i ) <= IntBigArrays.get( e, i + 1 ) );
+
+		d = IntBigArrays.newBigArray( 10 );
+		for( long i = IntBigArrays.length( d ); i-- != 0; ) IntBigArrays.set( d, i, (int)( i % 3 - 2 ) );
+		Random random = new Random( 0 );
+		e = IntBigArrays.newBigArray( IntBigArrays.length(  d ) );
+		for( long i = IntBigArrays.length( d ); i-- != 0; ) IntBigArrays.set( e, i, random.nextInt() );
+		IntBigArrays.radixSort( d, e );
+		for( long i = IntBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + IntBigArrays.get( d, i ) + ", " + IntBigArrays.get( e, i ) + ">, <" + IntBigArrays.get( d, i + 1 ) + ", " +  IntBigArrays.get( e, i + 1 ) + ">", IntBigArrays.get( d, i ) < IntBigArrays.get( d, i + 1 ) || IntBigArrays.get( d, i ) == IntBigArrays.get( d, i + 1 ) && IntBigArrays.get( e, i ) <= IntBigArrays.get( e, i + 1 ) );
+		
+		d = IntBigArrays.newBigArray( 100000 );
+		random = new Random( 0 );
+		for( long i = IntBigArrays.length( d ); i-- != 0; ) IntBigArrays.set( d, i, random.nextInt() );
+		e = IntBigArrays.newBigArray( IntBigArrays.length(  d ) );
+		for( long i = IntBigArrays.length( d ); i-- != 0; ) IntBigArrays.set( e, i, random.nextInt() );
+		IntBigArrays.radixSort( d, e );
+		for( long i = IntBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + IntBigArrays.get( d, i ) + ", " + IntBigArrays.get( e, i ) + ">, <" + IntBigArrays.get( d, i + 1 ) + ", " +  IntBigArrays.get( e, i + 1 ) + ">", IntBigArrays.get( d, i ) < IntBigArrays.get( d, i + 1 ) || IntBigArrays.get( d, i ) == IntBigArrays.get( d, i + 1 ) && IntBigArrays.get( e, i ) <= IntBigArrays.get( e, i + 1 ) );
+		for( long i = 100; i-- != 10; ) IntBigArrays.set( e, i, random.nextInt() );
+		IntBigArrays.radixSort( d, e, 10, 100 );
+		for( long i = 99; i-- != 10; ) assertTrue( Long.toString( i ) + ": <" + IntBigArrays.get( d, i ) + ", " + IntBigArrays.get( e, i ) + ">, <" + IntBigArrays.get( d, i + 1 ) + ", " +  IntBigArrays.get( e, i + 1 ) + ">", IntBigArrays.get( d, i ) < IntBigArrays.get( d, i + 1 ) || IntBigArrays.get( d, i ) == IntBigArrays.get( d, i + 1 ) && IntBigArrays.get( e, i ) <= IntBigArrays.get( e, i + 1 ) );
+
+		d = IntBigArrays.newBigArray( 1000000 );
+		random = new Random( 0 );
+		for( long i = IntBigArrays.length( d ); i-- != 0; ) IntBigArrays.set( d, i, random.nextInt() );
+		e = IntBigArrays.newBigArray( IntBigArrays.length(  d ) );
+		for( long i = IntBigArrays.length( d ); i-- != 0; ) IntBigArrays.set( e, i, random.nextInt() );
+		IntBigArrays.radixSort( d, e );
+		for( long i = IntBigArrays.length( d ) - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + IntBigArrays.get( d, i ) + ", " + IntBigArrays.get( e, i ) + ">, <" + IntBigArrays.get( d, i + 1 ) + ", " +  IntBigArrays.get( e, i + 1 ) + ">", IntBigArrays.get( d, i ) < IntBigArrays.get( d, i + 1 ) || IntBigArrays.get( d, i ) == IntBigArrays.get( d, i + 1 ) && IntBigArrays.get( e, i ) <= IntBigArrays.get( e, i + 1 ) );
+	}
+
+
+	@Test
+	public void testShuffle() {
+		int[] a = new int[ 100 ];
+		for( int i = a.length; i-- != 0; ) a[ i ] = i;
+		int[][] b = IntBigArrays.wrap( a );
+		IntBigArrays.shuffle( b, new Random() );
+		boolean[] c = new boolean[ a.length ];
+		for( long i = IntBigArrays.length( b ); i-- != 0; ) {
+			assertFalse( c[ IntBigArrays.get( b, i ) ] );
+			c[ IntBigArrays.get( b, i ) ] = true;
+		}
+	}
+
+	@Test
+	public void testShuffleFragment() {
+		int[] a = new int[ 100 ];
+		for( int i = a.length; i-- != 0; ) a[ i ] = -1;
+		for( int i = 10; i < 30; i++ ) a[ i ] = i - 10;
+		int[][] b = IntBigArrays.wrap( a );
+		IntBigArrays.shuffle( b, 10, 30, new Random() );
+		boolean[] c = new boolean[ 20 ];
+		for( int i = 20; i-- != 0; ) {
+			assertFalse( c[ IntBigArrays.get( b, i + 10 ) ] );
+			c[ IntBigArrays.get( b, i + 10 ) ] = true;
+		}
+	}
+
+	@Test
+	public void testBinarySearchLargeKey() {
+		final int[][] a = IntBigArrays.wrap( new int[] { 1, 2, 3 } );
+		IntBigArrays.binarySearch( a, 4 );
+	}
+
+}
\ No newline at end of file
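
For reference, fastutil big arrays are plain int[][] manipulated through the static helpers exercised above. A minimal sketch using only those calls:

    int[][] big = IntBigArrays.newBigArray( 1000 );
    for( long i = IntBigArrays.length( big ); i-- != 0; ) IntBigArrays.set( big, i, (int)( i % 7 ) );
    IntBigArrays.radixSort( big );               // sorts in place
    int smallest = IntBigArrays.get( big, 0 );   // element access goes through get()/set()
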
diff --git a/test/it/unimi/dsi/fastutil/ints/IntHeapSemiIndirectPriorityQueueTest.java b/test/it/unimi/dsi/fastutil/ints/IntHeapSemiIndirectPriorityQueueTest.java
new file mode 100644
index 0000000..d44ded5
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/IntHeapSemiIndirectPriorityQueueTest.java
@@ -0,0 +1,73 @@
+package it.unimi.dsi.fastutil.ints;
+
+import static org.junit.Assert.assertArrayEquals;
+
+import java.util.Arrays;
+
+import junit.framework.TestCase;
+
+import org.junit.Test;
+
+public class IntHeapSemiIndirectPriorityQueueTest extends TestCase {
+
+	public void testTops() {
+		int refArray[] = { 4, 3, 2, 1, 0, 3, 2, 1, 0, 2, 1, 0, 1, 0, 0 };
+		int tops[] = new int[ refArray.length ];
+		final IntHeapSemiIndirectPriorityQueue queue = new IntHeapSemiIndirectPriorityQueue( refArray );
+		for( int i = refArray.length; i-- != 0; ) queue.enqueue( i );
+
+		assertEquals( 5, queue.front( tops ) );
+		assertEquals( new IntOpenHashSet( new int[] { 4, 8, 11, 13, 14 } ), new IntOpenHashSet( tops, 0, 5 ) );
+		for( int i = 4; i-- != 0; ) {
+			queue.dequeue();
+			assertEquals( i + 1, queue.front( tops ) );
+		}
+		queue.dequeue();
+
+		assertEquals( 4, queue.front( tops ) );
+		assertEquals( new IntOpenHashSet( new int[] { 3, 7, 10, 12 } ), new IntOpenHashSet( tops, 0, 4 ) );
+		for( int i = 3; i-- != 0; ) {
+			queue.dequeue();
+			assertEquals( i + 1, queue.front( tops ) );
+		}
+		queue.dequeue();
+
+		assertEquals( 3, queue.front( tops ) );
+		assertEquals( new IntOpenHashSet( new int[] { 2, 6, 9 } ), new IntOpenHashSet( tops, 0, 3 ) );
+		for( int i = 2; i-- != 0; ) {
+			queue.dequeue();
+			assertEquals( i + 1, queue.front( tops ) );
+		}
+		queue.dequeue();
+
+		assertEquals( 2, queue.front( tops ) );
+		assertEquals( new IntOpenHashSet( new int[] { 1, 5 } ), new IntOpenHashSet( tops, 0, 2 ) );
+		queue.dequeue();
+		assertEquals( 1, queue.front( tops ) );
+		queue.dequeue();
+
+		assertEquals( 1, queue.front( tops ) );	
+	}
+	
+	@Test
+	public void testFrontWithComparator() {
+		final int[] refArray = { 8, 16, 9 };
+
+		IntComparator comparator = new AbstractIntComparator() {
+			@Override
+			public int compare( int k1, int k2 ) {
+				return ( k1 & 3 ) - ( k2 & 3 );
+			}
+		};
+
+		IntHeapSemiIndirectPriorityQueue queue = new IntHeapSemiIndirectPriorityQueue( refArray, comparator );
+		queue.enqueue( 0 );
+		queue.enqueue( 1 );
+		queue.enqueue( 2 );
+		final int[] front = new int[ 2 ];
+		assertEquals( 2, queue.front( front ) );
+		Arrays.sort( front );
+		assertArrayEquals( new int[] { 0, 1 }, front );
+	}
+	
+}
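
For reference, the semi-indirect queue above holds indices into a reference array; front() writes every enqueued index whose reference value is minimal into the supplied buffer and returns how many there are. A minimal sketch using only calls from the test:

    int[] refArray = { 4, 3, 2, 1 };
    IntHeapSemiIndirectPriorityQueue queue = new IntHeapSemiIndirectPriorityQueue( refArray );
    for( int i = refArray.length; i-- != 0; ) queue.enqueue( i );
    int[] front = new int[ refArray.length ];
    int n = queue.front( front );   // n == 1; front[ 0 ] == 3, the index of the smallest reference value
    queue.dequeue();                // removes that index from the queue
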
diff --git a/test/it/unimi/dsi/fastutil/ints/IntLinkedOpenHashSetTest.java b/test/it/unimi/dsi/fastutil/ints/IntLinkedOpenHashSetTest.java
new file mode 100644
index 0000000..7749731
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/IntLinkedOpenHashSetTest.java
@@ -0,0 +1,404 @@
+package it.unimi.dsi.fastutil.ints;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import it.unimi.dsi.fastutil.Hash;
+
+import java.io.IOException;
+import java.util.NoSuchElementException;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+@SuppressWarnings("rawtypes")
+public class IntLinkedOpenHashSetTest {
+
+	@Test
+	public void testStrangeRetainAllCase() {
+
+		IntArrayList initialElements = IntArrayList.wrap(new int[] { 586, 940,
+				1086, 1110, 1168, 1184, 1185, 1191, 1196, 1229, 1237, 1241,
+				1277, 1282, 1284, 1299, 1308, 1309, 1310, 1314, 1328, 1360,
+				1366, 1370, 1378, 1388, 1392, 1402, 1406, 1411, 1426, 1437,
+				1455, 1476, 1489, 1513, 1533, 1538, 1540, 1541, 1543, 1547,
+				1548, 1551, 1557, 1568, 1575, 1577, 1582, 1583, 1584, 1588,
+				1591, 1592, 1601, 1610, 1618, 1620, 1633, 1635, 1653, 1654,
+				1655, 1660, 1661, 1665, 1674, 1686, 1688, 1693, 1700, 1705,
+				1717, 1720, 1732, 1739, 1740, 1745, 1746, 1752, 1754, 1756,
+				1765, 1766, 1767, 1771, 1772, 1781, 1789, 1790, 1793, 1801,
+				1806, 1823, 1825, 1827, 1828, 1829, 1831, 1832, 1837, 1839,
+				1844, 2962, 2969, 2974, 2990, 3019, 3023, 3029, 3030, 3052,
+				3072, 3074, 3075, 3093, 3109, 3110, 3115, 3116, 3125, 3137,
+				3142, 3156, 3160, 3176, 3180, 3188, 3193, 3198, 3207, 3209,
+				3210, 3213, 3214, 3221, 3225, 3230, 3231, 3236, 3240, 3247,
+				3261, 4824, 4825, 4834, 4845, 4852, 4858, 4859, 4867, 4871,
+				4883, 4886, 4887, 4905, 4907, 4911, 4920, 4923, 4924, 4925,
+				4934, 4942, 4953, 4957, 4965, 4973, 4976, 4980, 4982, 4990,
+				4993, 6938, 6949, 6953, 7010, 7012, 7034, 7037, 7049, 7076,
+				7094, 7379, 7384, 7388, 7394, 7414, 7419, 7458, 7459, 7466,
+				7467 });
+
+		IntArrayList retainElements = IntArrayList.wrap(new int[] { 586 });
+
+		// Initialize both implementations with the same data
+		IntLinkedOpenHashSet instance = new IntLinkedOpenHashSet(initialElements);
+		IntRBTreeSet referenceInstance = new IntRBTreeSet(initialElements);
+
+		instance.retainAll(retainElements);
+		referenceInstance.retainAll(retainElements);
+
+		// print the correct result {586}
+		System.out.println("ref: " + referenceInstance);
+
+		// prints {586, 7379}, which is clearly wrong
+		System.out.println("ohm: " + instance);
+
+		// Fails
+		assertEquals( referenceInstance, instance );
+	}
+
+	private static java.util.Random r = new java.util.Random( 0 );
+
+	private static int genKey() {
+		return r.nextInt();
+	}
+
+	@SuppressWarnings("unchecked")
+	private static void test( int n, float f ) throws IOException, ClassNotFoundException {
+		int c;
+		IntLinkedOpenHashSet s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE, f );
+		java.util.Set<Integer> t = new java.util.LinkedHashSet<Integer>();
+		/* First of all, we fill t with random data. */
+		for ( int i = 0; i < f * n; i++ )
+			t.add( ( Integer.valueOf( genKey() ) ) );
+		/* Now we add to m the same data */
+		s.addAll( t );
+		assertTrue( "Error: !m.equals(t) after insertion", s.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after insertion", t.equals( s ) );
+		/* Now we check that m actually holds that data. */
+		for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on t)", s.contains( e ) );
+		}
+		/* Now we check that m actually holds that data, but iterating on m. */
+		c = 0;
+		for ( java.util.Iterator i = s.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			c++;
+			assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on m)", t.contains( e ) );
+		}
+		assertEquals( "Error: m has only " + c + " keys instead of " + t.size() + " after insertion (iterating on m)", t.size(), c );
+		/*
+		 * Now we check that inquiries about random data give the same answer in m and t. For m we
+		 * use the polymorphic method.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			int T = genKey();
+			assertTrue( "Error: divergence in keys between t and m (polymorphic method)", s.contains( T ) == t.contains( ( Integer.valueOf( T ) ) ) );
+		}
+		/*
+		 * Again, we check that inquiries about random data give the same answer in m and t, but for
+		 * m we use the standard method.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			int T = genKey();
+			assertTrue( "Error: divergence between t and m (standard method)", s.contains( ( Integer.valueOf( T ) ) ) == t.contains( ( Integer.valueOf( T ) ) ) );
+		}
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+		for ( int i = 0; i < 20 * n; i++ ) {
+			int T = genKey();
+			assertTrue( "Error: divergence in add() between t and m", s.add( ( Integer.valueOf( T ) ) ) == t.add( ( Integer.valueOf( T ) ) ) );
+			T = genKey();
+			assertTrue( "Error: divergence in remove() between t and m", s.remove( ( Integer.valueOf( T ) ) ) == t.remove( ( Integer.valueOf( T ) ) ) );
+		}
+		assertTrue( "Error: !m.equals(t) after removal", s.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after removal", t.equals( s ) );
+		/* Now we check that m actually holds that data. */
+		for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertTrue( "Error: m and t differ on a key (" + e + ") after removal (iterating on t)", s.contains( e ) );
+		}
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for ( java.util.Iterator i = s.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertTrue( "Error: m and t differ on a key (" + e + ") after removal (iterating on m)", t.contains( e ) );
+		}
+		/* Now we make m into an array, make it again a set and check it is OK. */
+		int a[] = s.toIntArray();
+		assertEquals( "Error: toArray() output (or array-based constructor) is not OK", new IntLinkedOpenHashSet( a ), s );
+		/* Now we check cloning. */
+		assertTrue( "Error: m does not equal m.clone()", s.equals( s.clone() ) );
+		assertTrue( "Error: m.clone() does not equal m", s.clone().equals( s ) );
+		int h = s.hashCode();
+		/* Now we save and read m. */
+		java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" );
+		java.io.OutputStream os = new java.io.FileOutputStream( ff );
+		java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os );
+		oos.writeObject( s );
+		oos.close();
+		java.io.InputStream is = new java.io.FileInputStream( ff );
+		java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is );
+		s = (IntLinkedOpenHashSet)ois.readObject();
+		ois.close();
+		ff.delete();
+		assertEquals( "Error: hashCode() changed after save/read", h, s.hashCode() );
+
+		assertEquals( "Error: clone()", s, s.clone() );
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for ( java.util.Iterator i = s.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertTrue( "Error: m and t differ on a key (" + e + ") after save/read", t.contains( e ) );
+		}
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+		for ( int i = 0; i < 20 * n; i++ ) {
+			int T = genKey();
+			assertTrue( "Error: divergence in add() between t and m after save/read", s.add( ( Integer.valueOf( T ) ) ) == t.add( ( Integer.valueOf( T ) ) ) );
+			T = genKey();
+			assertTrue( "Error: divergence in remove() between t and m after save/read", s.remove( ( Integer.valueOf( T ) ) ) == t.remove( ( Integer.valueOf( T ) ) ) );
+		}
+		assertTrue( "Error: !m.equals(t) after post-save/read removal", s.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( s ) );
+		/* Now we play with iterators, but only in the linked case. */
+		{
+			java.util.ListIterator<Integer> i, j;
+			Integer J;
+			i = s.iterator();
+			j = new java.util.LinkedList<Integer>( t ).listIterator();
+			for ( int k = 0; k < 2 * n; k++ ) {
+				assertTrue( "Error: divergence in hasNext()", i.hasNext() == j.hasNext() );
+				assertTrue( "Error: divergence in hasPrevious()", i.hasPrevious() == j.hasPrevious() );
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					assertTrue( "Error: divergence in next()", i.next().equals( J = j.next() ) );
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					assertTrue( "Error: divergence in previous()", i.previous().equals( J = j.previous() ) );
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+				assertTrue( "Error: divergence in nextIndex()", i.nextIndex() == j.nextIndex() );
+				assertTrue( "Error: divergence in previousIndex()", i.previousIndex() == j.previousIndex() );
+			}
+		}
+		if ( t.size() > 0 ) {
+			java.util.ListIterator i, j;
+			Object J;
+			j = new java.util.LinkedList( t ).listIterator();
+			int e = r.nextInt( t.size() );
+			Object from;
+			do
+				from = j.next();
+			while ( e-- != 0 );
+			i = s.iterator( ( ( ( (Integer)( from ) ).intValue() ) ) );
+			for ( int k = 0; k < 2 * n; k++ ) {
+				assertTrue( "Error: divergence in hasNext() (iterator with starting point " + from + ")", i.hasNext() == j.hasNext() );
+				assertTrue( "Error: divergence in hasPrevious() (iterator with starting point " + from + ")", i.hasPrevious() == j.hasPrevious() );
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					assertTrue( "Error: divergence in next() (iterator with starting point " + from + ")", i.next().equals( J = j.next() ) );
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					assertTrue( "Error: divergence in previous() (iterator with starting point " + from + ")", i.previous().equals( J = j.previous() ) );
+					if ( r.nextFloat() < 0.5 ) {
+						i.remove();
+						j.remove();
+						t.remove( J );
+					}
+				}
+				assertTrue( "Error: divergence in nextIndex() (iterator with starting point " + from + ")", i.nextIndex() == j.nextIndex() );
+				assertTrue( "Error: divergence in previousIndex() (iterator with starting point " + from + ")", i.previousIndex() == j.previousIndex() );
+			}
+		}
+		/* Now we check that m actually holds that data. */
+		assertTrue( "Error: ! m.equals( t ) after iteration", s.equals( t ) );
+		assertTrue( "Error: ! t.equals( m ) after iteration", t.equals( s ) );
+		/* Now we take out of m everything, and check that it is empty. */
+		for ( java.util.Iterator i = s.iterator(); i.hasNext(); ) {
+			i.next();
+			i.remove();
+		}
+		assertTrue( "Error: m is not empty (as it should be)", s.isEmpty() );
+		s.clear();
+		t.clear();
+		s.trim();
+		assertTrue( "Error: !m.equals(t) after rehash()", s.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after rehash()", t.equals( s ) );
+		s.trim();
+		assertTrue( "Error: !m.equals(t) after trim()", s.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after trim()", t.equals( s ) );
+		return;
+	}
+
+	@Test
+	public void test1() throws IOException, ClassNotFoundException {
+		test( 1, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1, Hash.FAST_LOAD_FACTOR );
+		test( 1, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test10() throws IOException, ClassNotFoundException {
+		test( 10, Hash.DEFAULT_LOAD_FACTOR );
+		test( 10, Hash.FAST_LOAD_FACTOR );
+		test( 10, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test100() throws IOException, ClassNotFoundException {
+		test( 100, Hash.DEFAULT_LOAD_FACTOR );
+		test( 100, Hash.FAST_LOAD_FACTOR );
+		test( 100, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Ignore("Too long")
+	@Test
+	public void test1000() throws IOException, ClassNotFoundException {
+		test( 1000, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1000, Hash.FAST_LOAD_FACTOR );
+		test( 1000, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void testAdd() {
+		IntLinkedOpenHashSet s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE );
+		assertTrue( s.add( 0 ) );
+		assertTrue( s.contains( 0 ) );
+		assertFalse( s.contains( 1 ) );
+		assertTrue( s.add( Integer.valueOf( 1 ) ) );
+		assertTrue( s.contains( Integer.valueOf( 1 ) ) );
+		assertFalse( s.contains( Integer.valueOf( 2 ) ) );
+	}
+
+	@Test
+	public void testRemove() {
+		IntLinkedOpenHashSet s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE );
+		for( int i = 0; i < 100; i++ ) assertTrue( s.add( i ) );
+		for( int i = 0; i < 100; i++ ) assertFalse( s.remove( 100 + i ) );
+		assertEquals( 0, s.firstInt() );
+		assertEquals( 99, s.lastInt() );
+		for( int i = 50; i < 150; i++ ) assertTrue( s.remove( i % 100 ) );
+	}
+
+	@Test
+	public void testIterator() {
+		IntLinkedOpenHashSet s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE );
+		for( int i = 0; i < 100; i++ ) assertTrue( s.add( i ) );
+		assertEquals( 0, s.firstInt() );
+		
+		IntListIterator iterator = s.iterator();
+		for( int i = 0; i <= 100; i++ ) {
+			assertEquals( Integer.toString( i ), i - 1, iterator.previousIndex() );
+			assertEquals( Integer.toString( i ), i, iterator.nextIndex() );
+			if ( i != 100 ) assertEquals( Integer.toString( i ), i, iterator.nextInt() );
+		}
+
+		iterator = s.iterator( s.lastInt() );
+		for( int i = 100; i-- != 0; ) {
+			assertEquals( Integer.toString( i ), i, iterator.previousIndex() );
+			assertEquals( Integer.toString( i ), i + 1, iterator.nextIndex() );
+			if ( i != 0 ) assertEquals( Integer.toString( i ), i, iterator.previousInt() );
+		}
+
+		iterator = s.iterator( 50 );
+		for( int i = 50; i < 100; i++ ) {
+			assertEquals( Integer.toString( i ), i, iterator.previousIndex() );
+			assertEquals( Integer.toString( i ), i + 1, iterator.nextIndex() );
+			if ( i != 99 ) assertEquals( Integer.toString( i ), i + 1, iterator.nextInt() );
+		}
+
+		iterator = s.iterator( 50 );
+		for( int i = 50; i-- != -1; ) {
+			assertEquals( Integer.toString( i ), i + 1, iterator.previousIndex() );
+			assertEquals( Integer.toString( i ), i + 2, iterator.nextIndex() );
+			if ( i != -1 ) assertEquals( Integer.toString( i ), i + 1, iterator.previousInt() );
+		}
+
+		iterator = s.iterator( 50 );
+		for( int i = 50; i-- != -1; ) assertEquals( Integer.toString( i ), i + 1, iterator.previousInt() );
+		assertEquals( -1, iterator.previousIndex() );
+		assertEquals( 0, iterator.nextIndex() );
+		
+		iterator = s.iterator( 50 );
+		for( int i = 50; i < 100 - 1; i++ ) assertEquals( Integer.toString( i ), i + 1, iterator.nextInt() );
+		assertEquals( 99, iterator.previousIndex() );
+		assertEquals( 100, iterator.nextIndex() );
+
+		iterator = s.iterator( 50 );
+		iterator.previousInt();
+		iterator.remove();
+		assertEquals( 49, iterator.previousIndex() );
+		assertEquals( 49, iterator.previousInt() );
+		
+		iterator = s.iterator( 49 );
+		iterator.nextInt();
+		iterator.remove();
+		assertEquals( 50, iterator.nextIndex() );
+		assertEquals( 52, iterator.nextInt() );
+	}
+	
+	@Test(expected=NoSuchElementException.class)
+	public void testIteratorMissingElement() {
+		IntLinkedOpenHashSet s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE );
+		for( int i = 0; i < 100; i++ ) assertTrue( s.add( i ) );
+		s.iterator( 1000 );
+	}
+
+	@Test
+	public void testPutAndMove() {
+		IntLinkedOpenHashSet s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE );
+		for( int i = 0; i < 100; i++ ) assertTrue( s.addAndMoveToFirst( i ) );
+		s.clear();
+		for( int i = 0; i < 100; i++ ) assertTrue( s.addAndMoveToLast( i ) );
+		assertTrue( s.addAndMoveToFirst( -1 ) );
+		assertEquals( -1, s.firstInt() );
+		assertTrue( s.addAndMoveToFirst( -2 ) );
+		assertEquals( -2, s.firstInt() );
+		assertFalse( s.addAndMoveToFirst( -1 ) );
+		assertEquals( -1, s.firstInt() );
+		assertFalse( s.addAndMoveToFirst( -1 ) );
+		assertEquals( -1, s.firstInt() );
+		assertFalse( s.addAndMoveToLast( -1 ) );
+		assertEquals( -1, s.lastInt() );
+		assertTrue( s.addAndMoveToLast( 100 ) );
+		assertEquals( 100, s.lastInt() );
+		assertTrue( s.addAndMoveToLast( 101 ) );
+		assertEquals( 101, s.lastInt() );
+		assertFalse( s.addAndMoveToLast( 100 ) );
+		assertEquals( 100, s.lastInt() );
+		assertFalse( s.addAndMoveToLast( 100 ) );
+		assertEquals( 100, s.lastInt() );
+		assertFalse( s.addAndMoveToFirst( 100 ) );
+		assertEquals( 100, s.firstInt() );
+	}
+
+	@Test
+	public void testRemoveFirstLast() {
+		IntLinkedOpenHashSet s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE );
+		for( int i = 0; i < 100; i++ ) assertTrue( s.add( i ) );
+		assertEquals( 0, s.removeFirstInt() );
+		assertEquals( 1, s.removeFirstInt() );
+		assertEquals( 99, s.removeLastInt() );
+	}	
+
+	@Test(expected=NoSuchElementException.class)
+	public void testRemoveFirstEmpty() {
+		new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ).firstInt();
+	}
+
+	@Test(expected=NoSuchElementException.class)
+	public void testRemoveLastEmpty() {
+		new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE ).lastInt();
+	}
+}
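
A minimal usage sketch of the linked set whose behaviour the test above asserts. The class, constructor and methods (IntLinkedOpenHashSet, addAndMoveToFirst/Last, firstInt/lastInt, iterator(from)) all appear in the test itself; the values in the comments are assumptions that follow the semantics the assertions check, not additional guarantees.

import it.unimi.dsi.fastutil.Hash;
import it.unimi.dsi.fastutil.ints.IntLinkedOpenHashSet;
import it.unimi.dsi.fastutil.ints.IntListIterator;

public class LinkedSetOrderDemo {
	public static void main( String[] args ) {
		// The linked variant remembers insertion order.
		IntLinkedOpenHashSet s = new IntLinkedOpenHashSet( Hash.DEFAULT_INITIAL_SIZE );
		for( int i = 0; i < 5; i++ ) s.add( i );                    // order: 0 1 2 3 4
		s.addAndMoveToFirst( 4 );                                   // already present: moved to the front -> 4 0 1 2 3
		s.addAndMoveToLast( 0 );                                    // already present: moved to the back  -> 4 1 2 3 0
		System.out.println( s.firstInt() + " .. " + s.lastInt() );  // 4 .. 0

		// iterator( from ) starts just after the given element and is bidirectional.
		IntListIterator it = s.iterator( 2 );
		System.out.println( it.nextInt() );                         // 3
		System.out.println( it.previousInt() );                     // 3 again (list-iterator semantics)
	}
}
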
diff --git a/test/it/unimi/dsi/fastutil/ints/IntOpenCustomHashSetTest.java b/test/it/unimi/dsi/fastutil/ints/IntOpenCustomHashSetTest.java
new file mode 100644
index 0000000..2b2ea4c
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/IntOpenCustomHashSetTest.java
@@ -0,0 +1,291 @@
+package it.unimi.dsi.fastutil.ints;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import it.unimi.dsi.fastutil.Hash;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.HashSet;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+@SuppressWarnings("rawtypes")
+
+/** Not a particularly good test, but it checks that the same hashing strategy is used everywhere. */
+
+public class IntOpenCustomHashSetTest {
+
+	private static final class Strategy implements IntHash.Strategy, Serializable {
+		private static final long serialVersionUID = 1L;
+
+		@Override
+		public int hashCode( int e ) {
+			return Integer.reverse( e );
+		}
+
+		@Override
+		public boolean equals( int a, int b ) {
+			return a == b;
+		}
+	}
+
+	private final static Strategy strategy = new Strategy();
+	
+	private static java.util.Random r = new java.util.Random( 0 );
+
+	private static int genKey() {
+		return r.nextInt( 10 );
+	}
+	
+	@SuppressWarnings("boxing")
+	private static void checkTable( IntOpenCustomHashSet s ) {
+		final boolean[] used = s.used;
+		final int[]key = s.key;
+		assert ( s.n & -s.n ) == s.n : "Table length is not a power of two: " + s.n;
+		assert s.n == s.key.length;
+		assert s.n == used.length;
+		int n = s.n;
+		while ( n-- != 0 )
+			if ( used[ n ] && !s.contains( key[ n ] ) ) throw new AssertionError( "Hash table has key " + key[ n ]
+					+ " marked as occupied, but the key does not belong to the table" );
+
+		java.util.HashSet<Integer> t = new java.util.HashSet<Integer>();
+		for ( int i = s.size(); i-- != 0; )
+			if ( used[ i ] && !t.add( key[ i ] ) ) throw new AssertionError( "Key " + key[ i ] + " appears twice" );
+
+	}
+
+	private static void printProbes( IntOpenCustomHashSet m ) {
+		long totProbes = 0;
+		double totSquareProbes = 0;
+		int maxProbes = 0;
+		final double f = (double)m.size / m.n;
+		for ( int i = 0, c = 0; i < m.n; i++ ) {
+			if ( m.used[ i ] ) c++;
+			else {
+				if ( c != 0 ) {
+					final long p = ( c + 1 ) * ( c + 2 ) / 2;
+					totProbes += p;
+					totSquareProbes += (double)p * p;
+				}
+				maxProbes = Math.max( c, maxProbes );
+				c = 0;
+				totProbes++;
+				totSquareProbes++;
+			}
+		}
+
+		final double expected = (double)totProbes / m.n;
+		System.err.println( "Expected probes: " + (
+				3 * Math.sqrt( 3 ) * ( f / ( ( 1 - f ) * ( 1 - f ) ) ) + 4 / ( 9 * f ) - 1
+				) + "; actual: " + expected + "; stddev: " + Math.sqrt( totSquareProbes / m.n - expected * expected ) + "; max probes: " + maxProbes );
+	}
+
+	@SuppressWarnings({ "boxing" })
+	private static void test( int n, float f ) throws IOException, ClassNotFoundException {
+		int c;
+		final Integer key[] = new Integer[ (int)Math.ceil( n * f ) ];
+		HashSet<Integer> t = new HashSet<Integer>();
+		/* First of all, we fill t with random data. */
+
+		for ( int i = 0; i < key.length; i++ ) t.add( ( key[ i ] = new Integer( genKey() ) ) );
+
+		IntOpenCustomHashSet m = new IntOpenCustomHashSet( Hash.DEFAULT_INITIAL_SIZE, f, strategy );
+
+		
+		/* Now we add to m the same data */
+
+		m.addAll( t );
+		checkTable( m );
+		
+		assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) );
+		printProbes( m );
+
+		/* Now we check that m actually holds that data. */
+
+		for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on t)", m.contains( e ) );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+
+		c = 0;
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			c++;
+			assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on m)", t.contains( e ) );
+		}
+
+		assertEquals( "Error: m has only " + c + " keys instead of " + t.size() + " after insertion (iterating on m)", c, t.size() );
+		/*
+		 * Now we check that inquiries about random data give the same answer in m and t. For m we
+		 * use the polymorphic method.
+		 */
+
+		for ( int i = 0; i < n; i++ ) {
+			int T = genKey();
+			assertEquals( "Error: divergence in keys between t and m (polymorphic method)", m.contains( T ), t.contains( ( Integer.valueOf( T ) ) ) );
+		}
+
+		/*
+		 * Again, we check that inquiries about random data give the same answer in m and t, but for
+		 * m we use the standard method.
+		 */
+
+		for ( int i = 0; i < n; i++ ) {
+			int T = genKey();
+			assertFalse( "Error: divergence between t and m (standard method)", m.contains( ( Integer.valueOf( T ) ) ) != t.contains( ( Integer.valueOf( T ) ) ) );
+		}
+
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for ( int i = 0; i < 20 * n; i++ ) {
+			int T = genKey();
+			assertFalse( "Error: divergence in add() between t and m", m.add( ( Integer.valueOf( T ) ) ) != t.add( ( Integer.valueOf( T ) ) ) );
+			T = genKey();
+			assertFalse( "Error: divergence in remove() between t and m", m.remove( ( Integer.valueOf( T ) ) ) != t.remove( ( Integer.valueOf( T ) ) ) );
+		}
+
+		checkTable( m );
+		assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) );
+		/* Now we check that m actually holds that data. */
+
+		for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertFalse( "Error: m and t differ on a key (" + e + ") after removal (iterating on t)", !m.contains( e ) );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertFalse( "Error: m and t differ on a key (" + e + ") after removal (iterating on m)", !t.contains( e ) );
+		}
+
+		/* Now we make m into an array, make it again a set and check it is OK. */
+		int a[] = m.toIntArray();
+
+		assertTrue( "Error: toArray() output (or array-based constructor) is not OK", new IntOpenHashSet( a ).equals( m ) );
+
+		/* Now we check cloning. */
+
+		assertTrue( "Error: m does not equal m.clone()", m.equals( m.clone() ) );
+		assertTrue( "Error: m.clone() does not equal m", m.clone().equals( m ) );
+
+		int h = m.hashCode();
+
+		/* Now we save and read m. */
+
+		java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" );
+		java.io.OutputStream os = new java.io.FileOutputStream( ff );
+		java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os );
+
+		oos.writeObject( m );
+		oos.close();
+
+		java.io.InputStream is = new java.io.FileInputStream( ff );
+		java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is );
+
+		m = (IntOpenCustomHashSet)ois.readObject();
+		ois.close();
+		ff.delete();
+
+		assertEquals( "Error: hashCode() changed after save/read", h, m.hashCode() );
+
+		printProbes( m );
+		checkTable( m );
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertFalse( "Error: m and t differ on a key (" + e + ") after save/read", !t.contains( e ) );
+		}
+
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for ( int i = 0; i < 20 * n; i++ ) {
+			int T = genKey();
+			assertFalse( "Error: divergence in add() between t and m after save/read", m.add( ( Integer.valueOf( T ) ) ) != t.add( ( Integer.valueOf( T ) ) ) );
+			T = genKey();
+			assertFalse( "Error: divergence in remove() between t and m after save/read", m.remove( ( Integer.valueOf( T ) ) ) != t.remove( ( Integer.valueOf( T ) ) ) );
+		}
+
+		assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) );
+
+		/* Now we take everything out of m, and check that it is empty. */
+
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			i.next();
+			i.remove();
+		}
+
+		assertFalse( "Error: m is not empty (as it should be)", !m.isEmpty() );
+
+
+		m = new IntOpenCustomHashSet( n, f, strategy );
+		t.clear();
+
+		/* Now we torture-test the hash table. This part is implemented only for integers and longs. */
+
+		for( int i = n; i-- != 0; ) m.add( i );
+		t.addAll( m );
+		printProbes( m );
+		checkTable( m );
+
+		for( int i = n; i-- != 0; )
+			assertEquals( "Error: m and t differ on a key during torture-test insertion.", m.add( i ), t.add( ( Integer.valueOf( i ) ) ) );
+
+		assertTrue( "Error: !m.equals(t) after torture-test insertion", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after torture-test insertion", t.equals( m ) );
+
+		for( int i = n; i-- != 0; )
+			assertEquals( "Error: m and t differ on a key during torture-test removal.", m.remove( i ), t.remove( ( Integer.valueOf( i ) ) ) );
+
+		assertTrue( "Error: !m.equals(t) after torture-test removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after torture-test removal", t.equals( m ) );
+		assertTrue( "Error: !m.equals(m.clone()) after torture-test removal", m.equals( m.clone() ) );
+		assertTrue( "Error: !m.clone().equals(m) after torture-test removal", m.clone().equals( m ) );
+
+		return;
+	}
+
+
+	@Test
+	public void test1() throws IOException, ClassNotFoundException {
+		test( 1, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1, Hash.FAST_LOAD_FACTOR );
+		test( 1, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test10() throws IOException, ClassNotFoundException {
+		test( 10, Hash.DEFAULT_LOAD_FACTOR );
+		test( 10, Hash.FAST_LOAD_FACTOR );
+		test( 10, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test100() throws IOException, ClassNotFoundException {
+		test( 100, Hash.DEFAULT_LOAD_FACTOR );
+		test( 100, Hash.FAST_LOAD_FACTOR );
+		test( 100, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Ignore("Too long")
+	@Test
+	public void test1000() throws IOException, ClassNotFoundException {
+		test( 1000, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1000, Hash.FAST_LOAD_FACTOR );
+		test( 1000, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+}
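
A sketch of how a custom IntHash.Strategy changes what the set considers equal, using only the constructor and interface methods that appear in the test above. The modulo-4 strategy and the printed values are illustrative assumptions; any consistent hashCode/equals pair works the same way.

import it.unimi.dsi.fastutil.Hash;
import it.unimi.dsi.fastutil.ints.IntHash;
import it.unimi.dsi.fastutil.ints.IntOpenCustomHashSet;

public class CustomStrategyDemo {
	// Two ints are considered equal when they agree modulo 4; the hash is consistent with that.
	private static final IntHash.Strategy MOD4 = new IntHash.Strategy() {
		@Override public int hashCode( int e ) { return e & 3; }
		@Override public boolean equals( int a, int b ) { return ( a & 3 ) == ( b & 3 ); }
	};

	public static void main( String[] args ) {
		IntOpenCustomHashSet s = new IntOpenCustomHashSet( Hash.DEFAULT_INITIAL_SIZE, Hash.DEFAULT_LOAD_FACTOR, MOD4 );
		System.out.println( s.add( 1 ) );      // true: first representative of the class 1 (mod 4)
		System.out.println( s.add( 5 ) );      // false: 5 is equal to 1 under the strategy
		System.out.println( s.contains( 9 ) ); // true: 9 is also 1 (mod 4)
		System.out.println( s.size() );        // 1
	}
}
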
diff --git a/test/it/unimi/dsi/fastutil/ints/IntOpenHashBigSetTest.java b/test/it/unimi/dsi/fastutil/ints/IntOpenHashBigSetTest.java
new file mode 100644
index 0000000..2052718
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/IntOpenHashBigSetTest.java
@@ -0,0 +1,321 @@
+package it.unimi.dsi.fastutil.ints;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+
+import it.unimi.dsi.fastutil.Hash;
+import it.unimi.dsi.fastutil.booleans.BooleanBigArrays;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+@SuppressWarnings("rawtypes")
+public class IntOpenHashBigSetTest {
+
+	@Test
+	public void testStrangeRetainAllCase() {
+
+		IntArrayList initialElements = IntArrayList.wrap(new int[] { 586, 940,
+				1086, 1110, 1168, 1184, 1185, 1191, 1196, 1229, 1237, 1241,
+				1277, 1282, 1284, 1299, 1308, 1309, 1310, 1314, 1328, 1360,
+				1366, 1370, 1378, 1388, 1392, 1402, 1406, 1411, 1426, 1437,
+				1455, 1476, 1489, 1513, 1533, 1538, 1540, 1541, 1543, 1547,
+				1548, 1551, 1557, 1568, 1575, 1577, 1582, 1583, 1584, 1588,
+				1591, 1592, 1601, 1610, 1618, 1620, 1633, 1635, 1653, 1654,
+				1655, 1660, 1661, 1665, 1674, 1686, 1688, 1693, 1700, 1705,
+				1717, 1720, 1732, 1739, 1740, 1745, 1746, 1752, 1754, 1756,
+				1765, 1766, 1767, 1771, 1772, 1781, 1789, 1790, 1793, 1801,
+				1806, 1823, 1825, 1827, 1828, 1829, 1831, 1832, 1837, 1839,
+				1844, 2962, 2969, 2974, 2990, 3019, 3023, 3029, 3030, 3052,
+				3072, 3074, 3075, 3093, 3109, 3110, 3115, 3116, 3125, 3137,
+				3142, 3156, 3160, 3176, 3180, 3188, 3193, 3198, 3207, 3209,
+				3210, 3213, 3214, 3221, 3225, 3230, 3231, 3236, 3240, 3247,
+				3261, 4824, 4825, 4834, 4845, 4852, 4858, 4859, 4867, 4871,
+				4883, 4886, 4887, 4905, 4907, 4911, 4920, 4923, 4924, 4925,
+				4934, 4942, 4953, 4957, 4965, 4973, 4976, 4980, 4982, 4990,
+				4993, 6938, 6949, 6953, 7010, 7012, 7034, 7037, 7049, 7076,
+				7094, 7379, 7384, 7388, 7394, 7414, 7419, 7458, 7459, 7466,
+				7467 });
+
+		IntArrayList retainElements = IntArrayList.wrap(new int[] { 586 });
+
+		// Initialize both implementations with the same data
+		IntOpenHashBigSet instance = new IntOpenHashBigSet(initialElements);
+		IntRBTreeSet referenceInstance = new IntRBTreeSet(initialElements);
+
+		instance.retainAll(retainElements);
+		referenceInstance.retainAll(retainElements);
+
+		// prints the correct result {586}
+		System.out.println("ref: " + referenceInstance);
+
+		// prints {586, 7379}, which is clearly wrong
+		System.out.println("ohm: " + instance);
+
+		// Fails
+		assertEquals( referenceInstance, instance );
+	}	
+
+	private static java.util.Random r = new java.util.Random( 0 );
+
+	private static int genKey() {
+		return r.nextInt();
+	}
+
+	@SuppressWarnings("boxing")
+	private static void checkTable( IntOpenHashBigSet s ) {
+		final boolean[][] used = s.used;
+		final int[][] key = s.key;
+		assert ( s.n & -s.n ) == s.n : "Table length is not a power of two: " + s.n;
+		assert s.n == IntBigArrays.length( s.key );
+		assert s.n == BooleanBigArrays.length( used );
+		long n = s.n;
+		while ( n-- != 0 )
+			if ( BooleanBigArrays.get( used, n ) && !s.contains( IntBigArrays.get( key, n ) ) ) throw new AssertionError( "Hash table has key " + IntBigArrays.get( key, n )
+					+ " marked as occupied, but the key does not belong to the table" );
+
+		java.util.HashSet<Integer> t = new java.util.HashSet<Integer>();
+		for ( long i = s.size64(); i-- != 0; )
+			if ( BooleanBigArrays.get( used, i ) && !t.add( IntBigArrays.get( key, i ) ) ) throw new AssertionError( "Key " + IntBigArrays.get( key, i ) + " appears twice" );
+
+	}
+
+	private static void printProbes( IntOpenHashBigSet m ) {
+		long totProbes = 0;
+		double totSquareProbes = 0;
+		long maxProbes = 0;
+		final double f = (double)m.size / m.n;
+		for ( long i = 0, c = 0; i < m.n; i++ ) {
+			if ( BooleanBigArrays.get( m.used, i ) ) c++;
+			else {
+				if ( c != 0 ) {
+					final long p = ( c + 1 ) * ( c + 2 ) / 2;
+					totProbes += p;
+					totSquareProbes += (double)p * p;
+				}
+				maxProbes = Math.max( c, maxProbes );
+				c = 0;
+				totProbes++;
+				totSquareProbes++;
+			}
+		}
+
+		final double expected = (double)totProbes / m.n;
+		System.err.println( "Expected probes: " + (
+				3 * Math.sqrt( 3 ) * ( f / ( ( 1 - f ) * ( 1 - f ) ) ) + 4 / ( 9 * f ) - 1
+				) + "; actual: " + expected + "; stddev: " + Math.sqrt( totSquareProbes / m.n - expected * expected ) + "; max probes: " + maxProbes );
+	}
+
+	@SuppressWarnings({ "unchecked", "boxing" })
+	private static void test( int n, float f ) throws IOException, ClassNotFoundException {
+		int c;
+		IntOpenHashBigSet m = new IntOpenHashBigSet( Hash.DEFAULT_INITIAL_SIZE, f );
+		java.util.Set t = new java.util.HashSet();
+
+		/* First of all, we fill t with random data. */
+
+		for ( int i = 0; i < f * n; i++ )
+			t.add( ( Integer.valueOf( genKey() ) ) );
+
+		/* Now we add to m the same data */
+
+		m.addAll( t );
+		checkTable( m );
+		
+		assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) );
+		printProbes( m );
+
+		/* Now we check that m actually holds that data. */
+
+		for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on t)", m.contains( e ) );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+
+		c = 0;
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			c++;
+			assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on m)", t.contains( e ) );
+		}
+
+		assertEquals( "Error: m has only " + c + " keys instead of " + t.size() + " after insertion (iterating on m)", c, t.size() );
+		/*
+		 * Now we check that inquiries about random data give the same answer in m and t. For m we
+		 * use the polymorphic method.
+		 */
+
+		for ( int i = 0; i < n; i++ ) {
+			int T = genKey();
+			assertEquals( "Error: divergence in keys between t and m (polymorphic method)", m.contains( T ), t.contains( ( Integer.valueOf( T ) ) ) );
+		}
+
+		/*
+		 * Again, we check that inquiries about random data give the same answer in m and t, but for
+		 * m we use the standard method.
+		 */
+
+		for ( int i = 0; i < n; i++ ) {
+			int T = genKey();
+			assertFalse( "Error: divergence between t and m (standard method)", m.contains( ( Integer.valueOf( T ) ) ) != t.contains( ( Integer.valueOf( T ) ) ) );
+		}
+
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for ( int i = 0; i < 20 * n; i++ ) {
+			int T = genKey();
+			assertFalse( "Error: divergence in add() between t and m", m.add( ( Integer.valueOf( T ) ) ) != t.add( ( Integer.valueOf( T ) ) ) );
+			T = genKey();
+			assertFalse( "Error: divergence in remove() between t and m", m.remove( ( Integer.valueOf( T ) ) ) != t.remove( ( Integer.valueOf( T ) ) ) );
+		}
+
+		checkTable( m );
+		assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) );
+		/* Now we check that m actually holds that data. */
+
+		for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertFalse( "Error: m and t differ on a key (" + e + ") after removal (iterating on t)", !m.contains( e ) );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertFalse( "Error: m and t differ on a key (" + e + ") after removal (iterating on m)", !t.contains( e ) );
+		}
+
+		/* Now we make m into an array, make it again a set and check it is OK. */
+		int a[] = m.toIntArray();
+
+		assertTrue( "Error: toArray() output (or array-based constructor) is not OK", new IntOpenHashBigSet( a ).equals( m ) );
+
+		/* Now we check cloning. */
+
+		assertTrue( "Error: m does not equal m.clone()", m.equals( m.clone() ) );
+		assertTrue( "Error: m.clone() does not equal m", m.clone().equals( m ) );
+
+		int h = m.hashCode();
+
+		/* Now we save and read m. */
+
+		java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" );
+		java.io.OutputStream os = new java.io.FileOutputStream( ff );
+		java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os );
+
+		oos.writeObject( m );
+		oos.close();
+
+		java.io.InputStream is = new java.io.FileInputStream( ff );
+		java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is );
+
+		m = (IntOpenHashBigSet)ois.readObject();
+		ois.close();
+		ff.delete();
+
+		assertEquals( "Error: hashCode() changed after save/read", h, m.hashCode() );
+
+		printProbes( m );
+		checkTable( m );
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertFalse( "Error: m and t differ on a key (" + e + ") after save/read", !t.contains( e ) );
+		}
+
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for ( int i = 0; i < 20 * n; i++ ) {
+			int T = genKey();
+			assertFalse( "Error: divergence in add() between t and m after save/read", m.add( ( Integer.valueOf( T ) ) ) != t.add( ( Integer.valueOf( T ) ) ) );
+			T = genKey();
+			assertFalse( "Error: divergence in remove() between t and m after save/read", m.remove( ( Integer.valueOf( T ) ) ) != t.remove( ( Integer.valueOf( T ) ) ) );
+		}
+
+		assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) );
+
+		/* Now we take everything out of m, and check that it is empty. */
+
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			i.next();
+			i.remove();
+		}
+
+		assertFalse( "Error: m is not empty (as it should be)", !m.isEmpty() );
+
+
+		m = new IntOpenHashBigSet( n, f );
+		t.clear();
+
+		/* Now we torture-test the hash table. This part is implemented only for integers and longs. */
+
+		for( int i = n; i-- != 0; ) m.add( i );
+		t.addAll( m );
+		printProbes( m );
+		checkTable( m );
+
+		/* Now all table entries are REMOVED. */
+
+		for( int i = n; i-- != 0; )
+			assertEquals( "Error: m and t differ on a key during torture-test insertion.", m.add( i ), t.add( ( Integer.valueOf( i ) ) ) );
+
+		assertTrue( "Error: !m.equals(t) after torture-test insertion", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after torture-test insertion", t.equals( m ) );
+
+		for( int i = n; i-- != 0; )
+			assertEquals( "Error: m and t differ on a key during torture-test removal.", m.remove( i ), t.remove( ( Integer.valueOf( i ) ) ) );
+
+		assertTrue( "Error: !m.equals(t) after torture-test removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after torture-test removal", t.equals( m ) );
+		assertTrue( "Error: !m.equals(m.clone()) after torture-test removal", m.equals( m.clone() ) );
+		assertTrue( "Error: !m.clone().equals(m) after torture-test removal", m.clone().equals( m ) );
+		m.trim();
+
+		assertTrue( "Error: !m.equals(t) after trim()", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after trim()", t.equals( m ) );
+
+		return;
+	}
+
+
+	@Test
+	public void test1() throws IOException, ClassNotFoundException {
+		test( 1, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1, Hash.FAST_LOAD_FACTOR );
+		test( 1, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test10() throws IOException, ClassNotFoundException {
+		test( 10, Hash.DEFAULT_LOAD_FACTOR );
+		test( 10, Hash.FAST_LOAD_FACTOR );
+		test( 10, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test100() throws IOException, ClassNotFoundException {
+		test( 100, Hash.DEFAULT_LOAD_FACTOR );
+		test( 100, Hash.FAST_LOAD_FACTOR );
+		test( 100, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Ignore("Too long")
+	@Test
+	public void test1000() throws IOException, ClassNotFoundException {
+		test( 1000, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1000, Hash.FAST_LOAD_FACTOR );
+		test( 1000, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+}
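
A small sketch of the big-set calls the test exercises (long-sized size64(), trim()), using only the constructor shown above. The element count and printed values are illustrative assumptions.

import it.unimi.dsi.fastutil.Hash;
import it.unimi.dsi.fastutil.ints.IntOpenHashBigSet;

public class BigSetDemo {
	public static void main( String[] args ) {
		// Backed by big (two-dimensional) arrays, so the size is reported as a long.
		IntOpenHashBigSet s = new IntOpenHashBigSet( Hash.DEFAULT_INITIAL_SIZE, Hash.DEFAULT_LOAD_FACTOR );
		for( int i = 0; i < 1000000; i++ ) s.add( i );
		System.out.println( s.size64() );       // 1000000, as a long
		System.out.println( s.contains( 42 ) ); // true
		s.trim();                               // drop excess capacity, as the test does
		System.out.println( s.size64() );       // still 1000000
	}
}
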
diff --git a/test/it/unimi/dsi/fastutil/ints/IntOpenHashSetTest.java b/test/it/unimi/dsi/fastutil/ints/IntOpenHashSetTest.java
new file mode 100644
index 0000000..4a9252a
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/IntOpenHashSetTest.java
@@ -0,0 +1,315 @@
+package it.unimi.dsi.fastutil.ints;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+
+import it.unimi.dsi.fastutil.Hash;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+@SuppressWarnings("rawtypes")
+public class IntOpenHashSetTest {
+
+
+	@Test
+	public void testStrangeRetainAllCase() {
+
+		IntArrayList initialElements = IntArrayList.wrap(new int[] { 586, 940,
+				1086, 1110, 1168, 1184, 1185, 1191, 1196, 1229, 1237, 1241,
+				1277, 1282, 1284, 1299, 1308, 1309, 1310, 1314, 1328, 1360,
+				1366, 1370, 1378, 1388, 1392, 1402, 1406, 1411, 1426, 1437,
+				1455, 1476, 1489, 1513, 1533, 1538, 1540, 1541, 1543, 1547,
+				1548, 1551, 1557, 1568, 1575, 1577, 1582, 1583, 1584, 1588,
+				1591, 1592, 1601, 1610, 1618, 1620, 1633, 1635, 1653, 1654,
+				1655, 1660, 1661, 1665, 1674, 1686, 1688, 1693, 1700, 1705,
+				1717, 1720, 1732, 1739, 1740, 1745, 1746, 1752, 1754, 1756,
+				1765, 1766, 1767, 1771, 1772, 1781, 1789, 1790, 1793, 1801,
+				1806, 1823, 1825, 1827, 1828, 1829, 1831, 1832, 1837, 1839,
+				1844, 2962, 2969, 2974, 2990, 3019, 3023, 3029, 3030, 3052,
+				3072, 3074, 3075, 3093, 3109, 3110, 3115, 3116, 3125, 3137,
+				3142, 3156, 3160, 3176, 3180, 3188, 3193, 3198, 3207, 3209,
+				3210, 3213, 3214, 3221, 3225, 3230, 3231, 3236, 3240, 3247,
+				3261, 4824, 4825, 4834, 4845, 4852, 4858, 4859, 4867, 4871,
+				4883, 4886, 4887, 4905, 4907, 4911, 4920, 4923, 4924, 4925,
+				4934, 4942, 4953, 4957, 4965, 4973, 4976, 4980, 4982, 4990,
+				4993, 6938, 6949, 6953, 7010, 7012, 7034, 7037, 7049, 7076,
+				7094, 7379, 7384, 7388, 7394, 7414, 7419, 7458, 7459, 7466,
+				7467 });
+
+		IntArrayList retainElements = IntArrayList.wrap(new int[] { 586 });
+
+		// Initialize both implementations with the same data
+		IntOpenHashSet instance = new IntOpenHashSet(initialElements);
+		IntRBTreeSet referenceInstance = new IntRBTreeSet(initialElements);
+
+		instance.retainAll(retainElements);
+		referenceInstance.retainAll(retainElements);
+
+		// prints the correct result {586}
+		System.out.println("ref: " + referenceInstance);
+
+		// prints {586, 7379}, which is clearly wrong
+		System.out.println("ohm: " + instance);
+
+		// Fails
+		assertEquals( referenceInstance, instance );
+	}	
+
+	private static java.util.Random r = new java.util.Random( 0 );
+
+	private static int genKey() {
+		return r.nextInt();
+	}
+
+	@SuppressWarnings("boxing")
+	private static void checkTable( IntOpenHashSet s ) {
+		final boolean[] used = s.used;
+		final int[]key = s.key;
+		assert ( s.n & -s.n ) == s.n : "Table length is not a power of two: " + s.n;
+		assert s.n == s.key.length;
+		assert s.n == used.length;
+		int n = s.n;
+		while ( n-- != 0 )
+			if ( used[ n ] && !s.contains( key[ n ] ) ) throw new AssertionError( "Hash table has key " + key[ n ]
+					+ " marked as occupied, but the key does not belong to the table" );
+
+		java.util.HashSet<Integer> t = new java.util.HashSet<Integer>();
+		for ( int i = s.size(); i-- != 0; )
+			if ( used[ i ] && !t.add( key[ i ] ) ) throw new AssertionError( "Key " + key[ i ] + " appears twice" );
+
+	}
+
+	private static void printProbes( IntOpenHashSet m ) {
+		long totProbes = 0;
+		double totSquareProbes = 0;
+		int maxProbes = 0;
+		final double f = (double)m.size / m.n;
+		for ( int i = 0, c = 0; i < m.n; i++ ) {
+			if ( m.used[ i ] ) c++;
+			else {
+				if ( c != 0 ) {
+					final long p = ( c + 1 ) * ( c + 2 ) / 2;
+					totProbes += p;
+					totSquareProbes += (double)p * p;
+				}
+				maxProbes = Math.max( c, maxProbes );
+				c = 0;
+				totProbes++;
+				totSquareProbes++;
+			}
+		}
+
+		final double expected = (double)totProbes / m.n;
+		System.err.println( "Expected probes: " + (
+				3 * Math.sqrt( 3 ) * ( f / ( ( 1 - f ) * ( 1 - f ) ) ) + 4 / ( 9 * f ) - 1
+				) + "; actual: " + expected + "; stddev: " + Math.sqrt( totSquareProbes / m.n - expected * expected ) + "; max probes: " + maxProbes );
+	}
+
+	@SuppressWarnings({ "unchecked", "boxing" })
+	private static void test( int n, float f ) throws IOException, ClassNotFoundException {
+		int c;
+		IntOpenHashSet m = new IntOpenHashSet( Hash.DEFAULT_INITIAL_SIZE, f );
+		java.util.Set t = new java.util.HashSet();
+
+		/* First of all, we fill t with random data. */
+
+		for ( int i = 0; i < Math.ceil( f * n ); i++ )
+			t.add( ( Integer.valueOf( genKey() ) ) );
+
+		/* Now we add to m the same data */
+
+		m.addAll( t );
+		checkTable( m );
+		
+		assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) );
+		printProbes( m );
+
+		/* Now we check that m actually holds that data. */
+
+		for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on t)", m.contains( e ) );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+
+		c = 0;
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			c++;
+			assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on m)", t.contains( e ) );
+		}
+
+		assertEquals( "Error: m has only " + c + " keys instead of " + t.size() + " after insertion (iterating on m)", c, t.size() );
+		/*
+		 * Now we check that inquiries about random data give the same answer in m and t. For m we
+		 * use the polymorphic method.
+		 */
+
+		for ( int i = 0; i < n; i++ ) {
+			int T = genKey();
+			assertEquals( "Error: divergence in keys between t and m (polymorphic method)", m.contains( T ), t.contains( ( Integer.valueOf( T ) ) ) );
+		}
+
+		/*
+		 * Again, we check that inquiries about random data give the same answer in m and t, but for
+		 * m we use the standard method.
+		 */
+
+		for ( int i = 0; i < n; i++ ) {
+			int T = genKey();
+			assertFalse( "Error: divergence between t and m (standard method)", m.contains( ( Integer.valueOf( T ) ) ) != t.contains( ( Integer.valueOf( T ) ) ) );
+		}
+
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for ( int i = 0; i < 20 * n; i++ ) {
+			int T = genKey();
+			assertFalse( "Error: divergence in add() between t and m", m.add( ( Integer.valueOf( T ) ) ) != t.add( ( Integer.valueOf( T ) ) ) );
+			T = genKey();
+			assertFalse( "Error: divergence in remove() between t and m", m.remove( ( Integer.valueOf( T ) ) ) != t.remove( ( Integer.valueOf( T ) ) ) );
+		}
+
+		checkTable( m );
+		assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) );
+		/* Now we check that m actually holds that data. */
+
+		for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertFalse( "Error: m and t differ on a key (" + e + ") after removal (iterating on t)", !m.contains( e ) );
+		}
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertFalse( "Error: m and t differ on a key (" + e + ") after removal (iterating on m)", !t.contains( e ) );
+		}
+
+		/* Now we make m into an array, make it again a set and check it is OK. */
+		int a[] = m.toIntArray();
+
+		assertTrue( "Error: toArray() output (or array-based constructor) is not OK", new IntOpenHashSet( a ).equals( m ) );
+
+		/* Now we check cloning. */
+
+		assertTrue( "Error: m does not equal m.clone()", m.equals( m.clone() ) );
+		assertTrue( "Error: m.clone() does not equal m", m.clone().equals( m ) );
+
+		int h = m.hashCode();
+
+		/* Now we save and read m. */
+
+		java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" );
+		java.io.OutputStream os = new java.io.FileOutputStream( ff );
+		java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os );
+
+		oos.writeObject( m );
+		oos.close();
+
+		java.io.InputStream is = new java.io.FileInputStream( ff );
+		java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is );
+
+		m = (IntOpenHashSet)ois.readObject();
+		ois.close();
+		ff.delete();
+
+		assertEquals( "Error: hashCode() changed after save/read", h, m.hashCode() );
+
+		printProbes( m );
+		checkTable( m );
+
+		/* Now we check that m actually holds that data, but iterating on m. */
+
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertFalse( "Error: m and t differ on a key (" + e + ") after save/read", !t.contains( e ) );
+		}
+
+
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+
+		for ( int i = 0; i < 20 * n; i++ ) {
+			int T = genKey();
+			assertFalse( "Error: divergence in add() between t and m after save/read", m.add( ( Integer.valueOf( T ) ) ) != t.add( ( Integer.valueOf( T ) ) ) );
+			T = genKey();
+			assertFalse( "Error: divergence in remove() between t and m after save/read", m.remove( ( Integer.valueOf( T ) ) ) != t.remove( ( Integer.valueOf( T ) ) ) );
+		}
+
+		assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) );
+
+		/* Now we take everything out of m, and check that it is empty. */
+
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			i.next();
+			i.remove();
+		}
+
+		assertFalse( "Error: m is not empty (as it should be)", !m.isEmpty() );
+
+
+		m = new IntOpenHashSet( n, f );
+		t.clear();
+
+		/* Now we torture-test the hash table. This part is implemented only for integers and longs. */
+
+		for( int i = n; i-- != 0; ) m.add( i );
+		t.addAll( m );
+		printProbes( m );
+		checkTable( m );
+
+		for( int i = n; i-- != 0; )
+			assertEquals( "Error: m and t differ on a key during torture-test insertion.", m.add( i ), t.add( ( Integer.valueOf( i ) ) ) );
+
+		assertTrue( "Error: !m.equals(t) after torture-test insertion", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after torture-test insertion", t.equals( m ) );
+
+		for( int i = n; i-- != 0; )
+			assertEquals( "Error: m and t differ on a key during torture-test removal.", m.remove( i ), t.remove( ( Integer.valueOf( i ) ) ) );
+
+		assertTrue( "Error: !m.equals(t) after torture-test removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after torture-test removal", t.equals( m ) );
+		assertTrue( "Error: !m.equals(m.clone()) after torture-test removal", m.equals( m.clone() ) );
+		assertTrue( "Error: !m.clone().equals(m) after torture-test removal", m.clone().equals( m ) );
+
+		return;
+	}
+
+
+	@Test
+	public void test1() throws IOException, ClassNotFoundException {
+		test( 1, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1, Hash.FAST_LOAD_FACTOR );
+		test( 1, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test10() throws IOException, ClassNotFoundException {
+		test( 10, Hash.DEFAULT_LOAD_FACTOR );
+		test( 10, Hash.FAST_LOAD_FACTOR );
+		test( 10, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test100() throws IOException, ClassNotFoundException {
+		test( 100, Hash.DEFAULT_LOAD_FACTOR );
+		test( 100, Hash.FAST_LOAD_FACTOR );
+		test( 100, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Ignore("Too long")
+	@Test
+	public void test1000() throws IOException, ClassNotFoundException {
+		test( 1000, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1000, Hash.FAST_LOAD_FACTOR );
+		test( 1000, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+}
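
A compact sketch of the calls the test relies on: the primitive (polymorphic) methods avoid boxing, and toIntArray() plus the array constructor round-trip the contents. The printed values are assumptions following the asserted behaviour.

import it.unimi.dsi.fastutil.Hash;
import it.unimi.dsi.fastutil.ints.IntOpenHashSet;

public class OpenHashSetDemo {
	public static void main( String[] args ) {
		IntOpenHashSet s = new IntOpenHashSet( Hash.DEFAULT_INITIAL_SIZE, Hash.DEFAULT_LOAD_FACTOR );
		for( int i = 0; i < 10; i++ ) s.add( i );                    // primitive add, no boxing
		System.out.println( s.contains( 3 ) );                       // polymorphic int method
		System.out.println( s.contains( Integer.valueOf( 3 ) ) );    // Object method, same answer
		int[] a = s.toIntArray();                                    // array round trip
		System.out.println( new IntOpenHashSet( a ).equals( s ) );   // true
	}
}
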
diff --git a/test/it/unimi/dsi/fastutil/ints/IntSemiIndirectHeapsTest.java b/test/it/unimi/dsi/fastutil/ints/IntSemiIndirectHeapsTest.java
new file mode 100644
index 0000000..c74c44e
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/IntSemiIndirectHeapsTest.java
@@ -0,0 +1,46 @@
+package it.unimi.dsi.fastutil.ints;
+
+import java.util.Arrays;
+
+import org.junit.Test;
+
+import it.unimi.dsi.fastutil.ints.IntSemiIndirectHeaps;
+import static org.junit.Assert.*;
+
+
+public class IntSemiIndirectHeapsTest {
+	
+	@Test
+	public void testFront() {
+		final int numBits = 20;
+		int[] refArray = new int[ 100 ], heap = new int[ 100 ], front = new int[ 100 ];
+
+		for( int i = ( 1 << numBits ) - 1; i-- != 0; ) {
+			for( int j = 0; j < numBits; j++ ) {
+				refArray[ j ] = ( i & ( 1 << j ) );
+				heap[ j ] = j;
+			}
+
+			IntSemiIndirectHeaps.makeHeap( refArray, heap, numBits, null );
+			assertEquals( "Heap " + Integer.toBinaryString( i ), numBits - Integer.bitCount( i ), IntSemiIndirectHeaps.front( refArray, heap, numBits, front ) );
+		}
+	}
+
+	@Test
+	public void testFrontWithComparator() {
+		final int[] refArray = { 8, 16, 9 };
+		final int[] heap = { 2, 1, 0 };
+
+		IntComparator comparator = new AbstractIntComparator() {
+			@Override
+			public int compare( int k1, int k2 ) {
+				return ( k1 & 3 ) - ( k2 & 3 );
+			}
+		};
+		IntSemiIndirectHeaps.makeHeap( refArray, heap, 3, comparator );
+		final int[] front = new int[ 2 ];
+		assertEquals( 2, IntSemiIndirectHeaps.front( refArray, heap, 3, front, comparator ) );
+		Arrays.sort( front );
+		assertArrayEquals( new int[] { 0, 1 }, front );
+	}
+}
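
A sketch of the semi-indirect heap calls exercised above: the heap array holds indices into a reference array that never moves, and front() collects the indices of all elements equal to the current minimum. The sample values are assumptions.

import java.util.Arrays;

import it.unimi.dsi.fastutil.ints.IntSemiIndirectHeaps;

public class SemiIndirectHeapDemo {
	public static void main( String[] args ) {
		final int[] refArray = { 5, 1, 4, 1, 3 };
		final int[] heap = { 0, 1, 2, 3, 4 };                                // indices into refArray
		IntSemiIndirectHeaps.makeHeap( refArray, heap, heap.length, null );  // null = natural order

		final int[] front = new int[ heap.length ];
		final int n = IntSemiIndirectHeaps.front( refArray, heap, heap.length, front );
		Arrays.sort( front, 0, n );
		// Two minimal elements (value 1), at reference indices 1 and 3.
		System.out.println( n + " " + Arrays.toString( Arrays.copyOf( front, n ) ) ); // 2 [1, 3]
	}
}
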
diff --git a/test/it/unimi/dsi/fastutil/ints/StripedInt2IntOpenHashMapTest.java b/test/it/unimi/dsi/fastutil/ints/StripedInt2IntOpenHashMapTest.java
new file mode 100644
index 0000000..e44da20
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/ints/StripedInt2IntOpenHashMapTest.java
@@ -0,0 +1,224 @@
+package it.unimi.dsi.fastutil.ints;
+
+import it.unimi.dsi.fastutil.Hash;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.junit.Ignore;
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+@SuppressWarnings("rawtypes")
+public class StripedInt2IntOpenHashMapTest {
+
+
+	private static java.util.Random r = new java.util.Random( 0 );
+
+	private static int genKey() {
+		return r.nextInt();
+	}
+
+	private static int genValue() {
+		return r.nextInt();
+	}
+
+	private static boolean valEquals( Object o1, Object o2 ) {
+		return o1 == null ? o2 == null : o1.equals( o2 );
+	}
+
+	@SuppressWarnings({ "unchecked", "boxing" })
+	protected static void test( int n, float f ) throws IOException, ClassNotFoundException {
+		StripedInt2IntOpenHashMap m = new StripedInt2IntOpenHashMap();
+		Map t = new java.util.HashMap();
+		/* First of all, we fill t with random data. */
+		for ( int i = 0; i < n; i++ )
+			t.put( ( Integer.valueOf( genKey() ) ), ( Integer.valueOf( genValue() ) ) );
+		/* Now we add to m the same data */
+		m.putAll( t );
+		assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) );
+		/*
+		 * Now we check that m actually holds that data.
+		 */
+		for ( java.util.Iterator i = t.entrySet().iterator(); i.hasNext(); ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			assertTrue( "Error: m and t differ on an entry (" + e + ") after insertion (iterating on t)", valEquals( e.getValue(), m.get( e.getKey() ) ) );
+		}
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for ( java.util.Iterator i = m.entrySet().iterator(); i.hasNext(); ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			assertTrue( "Error: m and t differ on an entry (" + e + ") after insertion (iterating on m)", valEquals( e.getValue(), t.get( e.getKey() ) ) );
+		}
+		/* Now we check that m actually holds the same keys. */
+		for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a key (" + o + ") after insertion (iterating on t)", m.containsKey( o ) );
+			assertTrue( "Error: m and t differ on a key (" + o + ", in keySet()) after insertion (iterating on t)", m.keySet().contains( o ) );
+		}
+		/* Now we check that m actually holds the same keys, but iterating on m. */
+		for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a key after insertion (iterating on m)", t.containsKey( o ) );
+			assertTrue( "Error: m and t differ on a key (in keySet()) after insertion (iterating on m)", t.keySet().contains( o ) );
+		}
+		/* Now we check that m actually holds the same values. */
+		for ( java.util.Iterator i = t.values().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a value after insertion (iterating on t)", m.containsValue( o ) );
+			assertTrue( "Error: m and t differ on a value (in values()) after insertion (iterating on t)", m.values().contains( o ) );
+		}
+		/* Now we check that m actually holds the same values, but iterating on m. */
+		for ( java.util.Iterator i = m.values().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a value after insertion (iterating on m)", t.containsValue( o ) );
+			assertTrue( "Error: m and t differ on a value (in values()) after insertion (iterating on m)", t.values().contains( o ) );
+		}
+		/*
+		 * Now we check that inquiries about random data give the same answer in m and t. For m we
+		 * use the polymorphic method.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			int T = genKey();
+			assertTrue( "Error: divergence in keys between t and m (polymorphic method)", m.containsKey( ( Integer.valueOf( T ) ) ) == t.containsKey( ( Integer.valueOf( T ) ) ) );
+			assertTrue( "Error: divergence between t and m (polymorphic method)",
+					!( m.get( T ) != ( 0 ) ) != ( ( t.get( ( Integer.valueOf( T ) ) ) == null ? ( 0 ) : ( ( ( (Integer)( t.get( ( Integer.valueOf( T ) ) ) ) ).intValue() ) ) ) != ( 0 ) ) ||
+							t.get( ( Integer.valueOf( T ) ) ) != null &&
+							!m.get( ( Integer.valueOf( T ) ) ).equals( t.get( ( Integer.valueOf( T ) ) ) ) );
+		}
+		/*
+		 * Again, we check that inquiries about random data give the same answer in m and t, but for
+		 * m we use the standard method.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			int T = genKey();
+			assertTrue( "Error: divergence between t and m (standard method)", valEquals( m.get( ( Integer.valueOf( T ) ) ), t.get( ( Integer.valueOf( T ) ) ) ) );
+		}
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+		for ( int i = 0; i < 20 * n; i++ ) {
+			int T = genKey();
+			int U = genValue();
+			assertTrue( "Error: divergence in put() between t and m",
+					valEquals( m.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ), t.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ) ) );
+			T = genKey();
+			assertTrue( "Error: divergence in remove() between t and m", valEquals( m.remove( ( Integer.valueOf( T ) ) ), t.remove( ( Integer.valueOf( T ) ) ) ) );
+		}
+		assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) );
+		/*
+		 * Now we check that m actually holds the same data.
+		 */
+		for ( java.util.Iterator i = t.entrySet().iterator(); i.hasNext(); ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			assertTrue( "Error: m and t differ on an entry (" + e + ") after removal (iterating on t)", valEquals( e.getValue(), m.get( e.getKey() ) ) );
+		}
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for ( java.util.Iterator i = m.entrySet().iterator(); i.hasNext(); ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			assertTrue( "Error: m and t differ on an entry (" + e + ") after removal (iterating on m)", valEquals( e.getValue(), t.get( e.getKey() ) ) );
+		}
+		/* Now we check that m actually holds the same keys. */
+		for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a key (" + o + ") after removal (iterating on t)", m.containsKey( o ) );
+			assertTrue( "Error: m and t differ on a key (" + o + ", in keySet()) after removal (iterating on t)", m.keySet().contains( o ) );
+		}
+		/* Now we check that m actually holds the same keys, but iterating on m. */
+		for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a key after removal (iterating on m)", t.containsKey( o ) );
+			assertTrue( "Error: m and t differ on a key (in keySet()) after removal (iterating on m)", t.keySet().contains( o ) );
+		}
+		/* Now we check that m actually holds the same values. */
+		for ( java.util.Iterator i = t.values().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a value after removal (iterating on t)", m.containsValue( o ) );
+			assertTrue( "Error: m and t differ on a value (in values()) after removal (iterating on t)", m.values().contains( o ) );
+		}
+		/* Now we check that m actually holds the same values, but iterating on m. */
+		for ( java.util.Iterator i = m.values().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a value after removal (iterating on m)", t.containsValue( o ) );
+			assertTrue( "Error: m and t differ on a value (in values()) after removal (iterating on m)", t.values().contains( o ) );
+		}
+		int h = m.hashCode();
+		/* Now we save and read m. */
+		java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" );
+		java.io.OutputStream os = new java.io.FileOutputStream( ff );
+		java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os );
+		oos.writeObject( m );
+		oos.close();
+		java.io.InputStream is = new java.io.FileInputStream( ff );
+		java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is );
+		m = (StripedInt2IntOpenHashMap)ois.readObject();
+		ois.close();
+		ff.delete();
+		assertEquals( "Error: hashCode() changed after save/read", m.hashCode(), h );
+		/* Now we check that m actually holds that data. */
+		for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on an entry after save/read", valEquals( m.get( o ), t.get( o ) ) );
+		}
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+		for ( int i = 0; i < 20 * n; i++ ) {
+			int T = genKey();
+			int U = genValue();
+			assertTrue( "Error: divergence in put() between t and m after save/read",
+					valEquals( m.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ), t.put( ( Integer.valueOf( T ) ), ( Integer.valueOf( U ) ) ) ) );
+			T = genKey();
+			Integer result;
+			assertTrue( "Error: divergence in remove() between t and m after save/read", valEquals( m.remove( T ), ( result = (Integer)t.remove( ( Integer.valueOf( T ) ) ) ) != null ? result.intValue() : 0 ) );
+		}
+		assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) );
+		/*
+		 * Now we take everything out of m, and check that it is empty.
+		 */
+		for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); )
+			m.remove( i.next() );
+		assertTrue( "Error: m is not empty (as it should be)", m.isEmpty() );
+		m = new StripedInt2IntOpenHashMap();
+		t.clear();
+		for( int i = n; i-- != 0; ) m.put( i, 1 );
+		t.putAll( m );
+		for( int i = n; i-- != 0; ) assertEquals( "Error: m and t differ on a key during torture-test insertion.", m.put( i, 2 ), t.put( Integer.valueOf( i ), 2 ) );	
+		
+		assertTrue( "Error: !m.equals(t) after torture-test removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after torture-test removal", t.equals( m ) );
+		//assertTrue( "Error: !m.equals(m.clone()) after torture-test removal", m.equals( m.clone() ) );
+		//assertTrue( "Error: !m.clone().equals(m) after torture-test removal", m.clone().equals( m ) );
+		//m.trim();
+		assertTrue( "Error: !m.equals(t) after trim()", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after trim()", t.equals( m ) );
+		return;
+	}
+
+	@Test
+	public void test1() throws IOException, ClassNotFoundException {
+		test( 1, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1, Hash.FAST_LOAD_FACTOR );
+		test( 1, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test10() throws IOException, ClassNotFoundException {
+		test( 10, Hash.DEFAULT_LOAD_FACTOR );
+		test( 10, Hash.FAST_LOAD_FACTOR );
+		test( 10, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test100() throws IOException, ClassNotFoundException {
+		test( 100, Hash.DEFAULT_LOAD_FACTOR );
+		test( 100, Hash.FAST_LOAD_FACTOR );
+		test( 100, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Ignore("Too long")
+	@Test
+	public void test1000() throws IOException, ClassNotFoundException {
+		test( 1000, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1000, Hash.FAST_LOAD_FACTOR );
+		test( 1000, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+}
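
A minimal sketch limited to the map calls this test itself makes (no-argument constructor, primitive put/get/remove, isEmpty). StripedInt2IntOpenHashMap does not appear elsewhere in this import, so the sketch assumes only those calls and that the class lives in the same it.unimi.dsi.fastutil.ints package as the test; the printed values follow the behaviour the assertions check (a missing key yields 0).

import it.unimi.dsi.fastutil.ints.StripedInt2IntOpenHashMap;

public class StripedMapDemo {
	public static void main( String[] args ) {
		StripedInt2IntOpenHashMap m = new StripedInt2IntOpenHashMap();
		m.put( 1, 100 );
		m.put( 2, 200 );
		System.out.println( m.get( 1 ) );   // 100
		System.out.println( m.get( 3 ) );   // 0: absent key
		m.remove( 2 );
		m.remove( 1 );
		System.out.println( m.isEmpty() );  // true
	}
}
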
diff --git a/test/it/unimi/dsi/fastutil/io/BinIOTest.java b/test/it/unimi/dsi/fastutil/io/BinIOTest.java
new file mode 100644
index 0000000..a3210a5
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/io/BinIOTest.java
@@ -0,0 +1,194 @@
+package it.unimi.dsi.fastutil.io;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import it.unimi.dsi.fastutil.bytes.ByteBigArrays;
+import it.unimi.dsi.fastutil.doubles.DoubleIterator;
+
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import org.junit.Test;
+
+public class BinIOTest {
+
+	final static byte[] SMALL = new byte[ 1024 ];
+	final static byte[] LARGE = new byte[ 1024 * 1024 + 42 ];
+	
+	static {
+		for( int i = SMALL.length; i-- != 0; ) SMALL[ i ] = (byte)i;
+		for( int i = LARGE.length; i-- != 0; ) LARGE[ i ] = (byte)i;
+	}
+	
+	public void testBytes( byte[] a ) throws IOException {
+		final File file = File.createTempFile( getClass().getSimpleName(), "dump" );
+		file.deleteOnExit();
+		final byte[] aShifted = new byte[ a.length + 1 ];
+		System.arraycopy( a, 0, aShifted, 1, a.length );
+		
+		for( int i = 0; i < 6; i++ ) {
+			file.delete();
+			switch(i) {
+			case 0: BinIO.storeBytes( a, file ); break;
+			case 1: BinIO.storeBytes( a, (DataOutput)new DataOutputStream( new FileOutputStream( file ) ) ); break;
+			case 2: BinIO.storeBytes( a, new FileOutputStream( file ) ); break;
+			case 3: BinIO.storeBytes( aShifted, 1, a.length, file ); break;
+			case 4: BinIO.storeBytes( aShifted, 1, a.length, (DataOutput)new DataOutputStream( new FileOutputStream( file ) ) ); break;
+			case 5: BinIO.storeBytes( aShifted, 1, a.length, new FileOutputStream( file ) ); break;
+			}
+			assertArrayEquals( a, BinIO.loadBytes( file ) );
+
+			byte[] b = new byte[ a.length ];
+			assertEquals( a.length, BinIO.loadBytes( file, b ) );
+			assertArrayEquals( a, b );
+			assertEquals( a.length, BinIO.loadBytes( file, b, 0, a.length ) );
+			assertArrayEquals( a, b );
+
+			assertEquals( a.length, BinIO.loadBytes( new FileInputStream( file ), b ) );
+			assertArrayEquals( a, b );
+			assertEquals( a.length, BinIO.loadBytes( new FileInputStream( file ), b, 0, a.length ) );
+			assertArrayEquals( a, b );
+
+			byte[] c = new byte[ a.length + 1 ];
+			assertEquals( a.length, BinIO.loadBytes( new FileInputStream( file ), c ) );
+			assertEquals( 0, c[ a.length ] );
+			System.arraycopy( c, 0, b, 0, b.length );
+			assertArrayEquals( a, b );
+			assertEquals( a.length, BinIO.loadBytes( new FileInputStream( file ), c, 1, a.length ) );
+			assertEquals( 0, c[ 0 ] );
+			System.arraycopy( c, 1, b, 0, b.length );
+			assertArrayEquals( a, b );
+
+			c[ a.length ] = 0;
+			assertEquals( a.length, BinIO.loadBytes( (DataInput)new DataInputStream( new FileInputStream( file ) ), c ) );
+			assertEquals( 0, c[ a.length ] );
+			System.arraycopy( c, 0, b, 0, b.length );
+			assertArrayEquals( a, b );
+			assertEquals( a.length, BinIO.loadBytes( (DataInput)new DataInputStream( new FileInputStream( file ) ), c, 1, a.length ) );
+			assertEquals( 0, c[ 0 ] );
+			System.arraycopy( c, 1, b, 0, b.length );
+			assertArrayEquals( a, b );
+		}
+
+	}
+
+	@Test
+	public void testBytes() throws IOException {
+		testBytes( SMALL );
+		testBytes( LARGE );
+	}
+
+	public void testBigBytes( byte[][] a ) throws IOException {
+		final File file = File.createTempFile( getClass().getSimpleName(), "dump" );
+		file.deleteOnExit();
+		final long length = ByteBigArrays.length( a );
+		final byte[][] aShifted = ByteBigArrays.newBigArray( length + 1 );
+		ByteBigArrays.copy( a, 0, aShifted, 1, length );
+		
+		for( int i = 0; i < 6; i++ ) {
+			file.delete();
+			switch(i) {
+			case 0: BinIO.storeBytes( a, file ); break;
+			case 1: BinIO.storeBytes( a, (DataOutput)new DataOutputStream( new FileOutputStream( file ) ) ); break;
+			case 2: BinIO.storeBytes( a, new FileOutputStream( file ) ); break;
+			case 3: BinIO.storeBytes( aShifted, 1, length, file ); break;
+			case 4: BinIO.storeBytes( aShifted, 1, length, (DataOutput)new DataOutputStream( new FileOutputStream( file ) ) ); break;
+			case 5: BinIO.storeBytes( aShifted, 1, length, new FileOutputStream( file ) ); break;
+			}
+			assertArrayEquals( a, BinIO.loadBytesBig( file ) );
+
+			byte[][] b = ByteBigArrays.newBigArray( length );
+			assertEquals( length, BinIO.loadBytes( file, b ) );
+			assertArrayEquals( a, b );
+			assertEquals( length, BinIO.loadBytes( file, b, 0, length ) );
+			assertArrayEquals( a, b );
+
+			assertEquals( length, BinIO.loadBytes( new FileInputStream( file ), b ) );
+			assertArrayEquals( a, b );
+			assertEquals( length, BinIO.loadBytes( new FileInputStream( file ), b, 0, length ) );
+			assertArrayEquals( a, b );
+
+			byte[][] c = ByteBigArrays.newBigArray( length + 1 );
+			assertEquals( length, BinIO.loadBytes( new FileInputStream( file ), c ) );
+			assertEquals( 0, ByteBigArrays.get( c, length ) );
+			ByteBigArrays.copy( c, 0, b, 0, b.length );
+			assertArrayEquals( a, b );
+			assertEquals( length, BinIO.loadBytes( new FileInputStream( file ), c, 1, length ) );
+			assertEquals( 0, ByteBigArrays.get( c, 0 ) );
+			ByteBigArrays.copy( c, 1, b, 0, b.length );
+			assertArrayEquals( a, b );
+
+			ByteBigArrays.set( c, length, (byte)0 );
+			assertEquals( length, BinIO.loadBytes( (DataInput)new DataInputStream( new FileInputStream( file ) ), c ) );
+			assertEquals( 0, ByteBigArrays.get( c, length ) );
+			ByteBigArrays.copy( c, 0, b, 0, b.length );
+			assertArrayEquals( a, b );
+			
+			assertEquals( length, BinIO.loadBytes( (DataInput)new DataInputStream( new FileInputStream( file ) ), c, 1, length ) );
+			assertEquals( 0, ByteBigArrays.get( c, 0 ) );
+			ByteBigArrays.copy( c, 1, b, 0, b.length );
+			assertArrayEquals( a, b );
+		}
+
+	}
+
+	@Test
+	public void testBigBytes() throws IOException {
+		testBigBytes( ByteBigArrays.wrap( SMALL ) );
+		testBigBytes( ByteBigArrays.wrap( LARGE ) );
+	}
+	
+	public void testFileDataWrappers() throws IOException {
+		final File file = File.createTempFile( getClass().getSimpleName(), "dump" );
+		file.deleteOnExit();
+		final DataOutputStream dos = new DataOutputStream( new FileOutputStream( file ) );
+		for( int i = 0; i < 100; i++ ) dos.writeDouble( i );
+		dos.close();
+		
+		DoubleIterator di = BinIO.asDoubleIterator( file );
+		for( int i = 0; i < 100; i++ ) assertEquals( i, di.nextDouble(), 0. );
+		assertFalse( di.hasNext() );
+
+		di = BinIO.asDoubleIterator( file );
+		for( int i = 0; i < 100; i++ ) {
+			assertTrue( di.hasNext() );
+			assertEquals( i, di.nextDouble(), 0. );
+		}
+		
+		di = BinIO.asDoubleIterator( file );
+		int s = 1;
+		for( int i = 0; i < 100; i++ ) {
+			assertEquals( Math.min( s, 100 - i ), di.skip( s ) );
+			i += s;
+			if ( i >= 100 ) break;
+			assertEquals( i, di.nextDouble(), 0. );
+			s *= 2;
+		}
+
+		di = BinIO.asDoubleIterator( file );
+		s = 1;
+		for( int i = 0; i < 100; i++ ) {
+			if ( s > 100 - i ) break;
+			assertTrue( di.hasNext() );
+			assertEquals( Math.min( s, 100 - i ), di.skip( s ) );
+			i += s;
+			if ( i >= 100 ) {
+				assertFalse( di.hasNext() );
+				break;
+			}
+			assertTrue( di.hasNext() );
+			assertTrue( di.hasNext() ); // To increase coverage
+			assertEquals( i, di.nextDouble(), 0. );
+			s *= 2;
+		}
+
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/io/FastBufferedInputStreamTest.java b/test/it/unimi/dsi/fastutil/io/FastBufferedInputStreamTest.java
new file mode 100644
index 0000000..8dd04d1
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/io/FastBufferedInputStreamTest.java
@@ -0,0 +1,345 @@
+package it.unimi.dsi.fastutil.io;
+
+import it.unimi.dsi.fastutil.io.FastBufferedInputStream;
+import it.unimi.dsi.fastutil.io.FastBufferedInputStream.LineTerminator;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.channels.FileChannel;
+import java.util.Arrays;
+import java.util.EnumSet;
+import java.util.Random;
+
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class FastBufferedInputStreamTest {
+	private final static boolean DEBUG = false;
+	
+	/** A byte array input stream that will return its data in small chunks,
+	 * even if it could actually return more data, and skips fewer bytes than it could.
+	 */
+	
+	private static class BastardByteArrayInputStream extends ByteArrayInputStream {
+		private final static long seed = System.currentTimeMillis();
+		private final static Random r = new Random( seed );
+		static {
+			System.err.println( "Seed: " + seed );
+		}
+
+		public BastardByteArrayInputStream( byte[] array ) {
+			super( array );
+		}
+
+		@Override
+		public int read( byte[] buffer, int offset, int length ) {
+			int k = r.nextInt( 2 ) + 1;
+			return super.read( buffer, offset, length < k ? length : k );
+		}
+		
+		public long skip( long n ) {
+			int k = r.nextInt( 2 );
+			return super.skip( n < k ? n : k );
+		}
+
+	}
+
+	public void testReadline( int bufferSize ) throws IOException {
+		FastBufferedInputStream stream;
+		byte[] b;
+		
+		stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r' } ), bufferSize );
+		
+		b = new byte[ 4 ];
+		stream.readLine( b, 0, b.length, EnumSet.of( LineTerminator.CR ) );
+		assertTrue( Arrays.toString( b ), Arrays.equals( b, new byte[] { 'A', 'B', 'C', 0 } ) );
+		assertEquals( 4, stream.position() );
+		assertEquals( -1, stream.readLine( b, 0, b.length, EnumSet.of( LineTerminator.CR ) ) );
+
+		stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r' } ), bufferSize );
+		assertEquals( 4, stream.readLine( b, 0, b.length, EnumSet.of( LineTerminator.LF ) ) );
+		assertEquals( 4, stream.position() );
+
+		stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r' } ), bufferSize );
+		assertEquals( 4, stream.readLine( b, 0, b.length, EnumSet.of( LineTerminator.LF ) ) );
+		assertEquals( 4, stream.position() );
+
+		stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r' } ), bufferSize );
+		assertEquals( 4, stream.readLine( b, 0, b.length, EnumSet.of( LineTerminator.CR_LF ) ) );
+		assertEquals( 4, stream.position() );
+
+		stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r' } ), bufferSize );
+		assertEquals( 4, stream.readLine( b, 0, b.length, EnumSet.of( LineTerminator.CR_LF ) ) );
+		assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 'C', '\r' } ) );
+		assertEquals( 4, stream.position() );
+		
+		b = new byte[ 4 ];
+		stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r' } ), bufferSize );
+		stream.readLine( b, 0, 2, EnumSet.of( LineTerminator.CR ) );
+		assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 0, 0 } ) );
+		assertEquals( 2, stream.position() );
+		
+		// Reads with only LF as terminator
+		stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r', '\n', 'D' } ), bufferSize );
+		assertEquals( 4, stream.readLine( b, 0, 4, EnumSet.of( LineTerminator.LF ) ) );
+		assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 'C', '\r' } ) );
+		assertEquals( 4, stream.position() );
+		assertEquals( 0, stream.readLine( b, 0, 4, EnumSet.of( LineTerminator.LF ) ) );
+		assertEquals( 5, stream.position() );
+		assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 'C', '\r' } ) );
+		assertEquals( 1, stream.readLine( b, 2, 2, EnumSet.of( LineTerminator.LF ) ) );
+		assertEquals( 6, stream.position() );
+		assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 'D', '\r' } ) );
+
+		// Reads with both LF and CR/LF as terminators
+		b = new byte[ 4 ];
+		stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r', '\n', 'D' } ), bufferSize );
+		assertEquals( 3, stream.readLine( b, 0, 4, EnumSet.of( LineTerminator.CR, LineTerminator.CR_LF ) ) );
+		assertEquals( 5, stream.position() );
+		assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 'C', 0 } ) );
+		assertEquals( 1, stream.readLine( b, 2, 2, EnumSet.of( LineTerminator.CR, LineTerminator.CR_LF ) ) );
+		assertEquals( 6, stream.position() );
+		assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 'D', 0 } ) );
+
+		// Reads with only CR as terminator
+		b = new byte[ 4 ];
+		stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r', '\n', 'D' } ), bufferSize );
+		assertEquals( 3, stream.readLine( b, 0, 4, EnumSet.of( LineTerminator.CR ) ) );
+		assertEquals( 4, stream.position() );
+		assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 'C', 0 } ) );
+		assertEquals( 2, stream.readLine( b, 2, 2, EnumSet.of( LineTerminator.CR ) ) );
+		assertEquals( 6, stream.position() );
+		assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', '\n', 'D' } ) );
+
+		// Reads with only CR/LF as terminator
+		stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r', '\n', 'D' } ), bufferSize );
+		b = new byte[ 4 ];
+		assertEquals( 3, stream.readLine( b, 0, 4, EnumSet.of( LineTerminator.CR_LF ) ) );
+		assertEquals( 5, stream.position() );
+		assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 'C', 0 } ) );
+		assertEquals( 1, stream.readLine( b, 0, 4, EnumSet.of( LineTerminator.CR_LF ) ) );
+		assertEquals( 6, stream.position() );
+		assertTrue( Arrays.equals( b, new byte[] { 'D', 'B', 'C', 0 } ) );
+		assertEquals( -1, stream.readLine( b, 0, 4, EnumSet.of( LineTerminator.CR_LF ) ) );
+
+		// Reads with both CR and CR/LF as terminator
+		
+		// CR at end-of-file
+		stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r' } ), bufferSize );
+		b = new byte[ 4 ];
+		assertEquals( 3, stream.readLine( b, 0, 4, EnumSet.of( LineTerminator.CR_LF, LineTerminator.CR ) ) );
+		assertEquals( 4, stream.position() );
+		assertTrue( Arrays.equals( b, new byte[] { 'A', 'B', 'C', 0 } ) );
+
+	}
+
+	@Test
+	public void testReadLine() throws IOException {
+		testReadline( 1 );
+		testReadline( 2 );
+		testReadline( 3 );
+		testReadline( 4 );
+		testReadline( 5 );
+		testReadline( 6 );
+		testReadline( 7 );
+		testReadline( 100 );
+	}
+
+	public void testSkip( int bufferSize ) throws IOException {
+		FastBufferedInputStream stream;
+		
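+		// Near end-of-stream, skip() returns only the number of bytes actually skipped, and 0 once the stream is exhausted.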
+		stream = new FastBufferedInputStream( new BastardByteArrayInputStream( new byte[] { 'A', 'B', 'C', '\r', '\n', 'D' } ), bufferSize );
+		assertEquals( 2, stream.skip( 2 ) );
+		assertEquals( 2, stream.position() );
+		assertEquals( 1, stream.skip( 1 ) );
+		assertEquals( 3, stream.position() );
+		assertEquals( 3, stream.skip( 4 ) );
+		assertEquals( 6, stream.position() );
+		assertEquals( 0, stream.skip( 1 ) );
+		assertEquals( 6, stream.position() );
+	}
+
+	@Test
+	public void testSkip() throws IOException {
+		testSkip( 1 );
+		testSkip( 2 );
+		testSkip( 3 );
+		testSkip( 4 );
+		testSkip( 5 );
+		testSkip( 6 );
+		testSkip( 7 );
+		testSkip( 100 );
+	}
+	
+	@Test
+	public void testPosition() throws IOException {
+		File temp = File.createTempFile( this.getClass().getSimpleName(), ".tmp" );
+		temp.deleteOnExit();
+		FileOutputStream fos = new FileOutputStream( temp );
+		fos.write( new byte[] { 0, 1, 2, 3, 4 } );
+		fos.close();
+		
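+		// Reposition a stream whose 2-byte buffer is smaller than the 5-byte file, then streams created with the default buffer size.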
+		FastBufferedInputStream stream = new FastBufferedInputStream( new FileInputStream( temp ), 2 );
+		byte[] b = new byte[ 2 ];
+		stream.read( b );
+		stream.flush();
+		stream.position( 0 );
+		assertEquals( 0, stream.read() );
+		stream.close();
+		
+		stream = new FastBufferedInputStream( new FileInputStream( temp ) );
+		b = new byte[ 1 ];
+		stream.read( b );
+		stream.flush();
+		stream.position( 0 );
+		assertEquals( 0, stream.read() );
+		stream.close();
+
+		stream = new FastBufferedInputStream( new FileInputStream( temp ) );
+		b = new byte[ 5 ];
+		stream.read( b );
+		stream.flush();
+		assertEquals( -1, stream.read() );
+		stream.position( 5 );
+		assertEquals( -1, stream.read() );
+		stream.position( 0 );
+		assertEquals( 0, stream.read() );
+		stream.position( 1 );
+		assertEquals( 1, stream.read() );
+		stream.position( 3 );
+		assertEquals( 3, stream.read() );
+		stream.position( 1 );
+		assertEquals( 1, stream.read() );
+		stream.position( 0 );
+		assertEquals( 0, stream.read() );
+		stream.close();
+	}
+	
+	@Test
+	public void testRead() throws IOException {
+		// Reads with length larger than buffer size
+		
+		// No head, no stream
+		InputStream stream = new FastBufferedInputStream( new ByteArrayInputStream( new byte[] {} ), 1 );
+		byte[] b = new byte[ 4 ];
+		
+		assertEquals( -1, stream.read( b, 0, 2 ) );
+		
+		// Some head, no stream
+		stream = new FastBufferedInputStream( new ByteArrayInputStream( new byte[] { 'A', 'B' } ), 2 );
+		b = new byte[ 4 ];
+		
+		assertEquals( 1, stream.read( b, 0, 1 ) );
+		assertEquals( 1, stream.read( b, 0, 3 ) );
+		
+		// Some head, some stream
+		stream = new FastBufferedInputStream( new ByteArrayInputStream( new byte[] { 'A', 'B', 'C', 'D' } ), 2 );
+		b = new byte[ 4 ];
+		
+		assertEquals( 1, stream.read( b, 0, 1 ) );
+		assertEquals( 3, stream.read( b, 0, 3 ) );
+
+		// No head, some stream
+		stream = new FastBufferedInputStream( new ByteArrayInputStream( new byte[] { 'A', 'B', 'C', 'D' } ), 2 );
+		b = new byte[ 4 ];
+		
+		assertEquals( 3, stream.read( b, 0, 3 ) );
+		
+		// Reads with length smaller than or equal to buffer size
+		
+		// No head, no stream
+		stream = new FastBufferedInputStream( new ByteArrayInputStream( new byte[] {} ), 4 );
+		b = new byte[ 4 ];
+		
+		assertEquals( -1, stream.read( b, 0, 2 ) );
+
+	}
+	
+	public void testRandom( int bufferSize ) throws IOException {
+		File temp = File.createTempFile( this.getClass().getSimpleName(), "tmp" );
+		temp.deleteOnExit();
+		
+		// Create temp random file
+		FileOutputStream out = new FileOutputStream( temp );
+		Random random = new Random();
+		int length = 100000 + random.nextInt( 10000 );
+		for( int i = 0; i < length; i++ ) out.write( random.nextInt() );
+		out.close();
+
+		FastBufferedInputStream bis = new FastBufferedInputStream( new FileInputStream( temp ), bufferSize );
+		FileInputStream test = new FileInputStream( temp );
+		FileChannel fc = test.getChannel();
+		int a1, a2, off, len, pos;
+		byte b1[] = new byte[ 32768 ];
+		byte b2[] = new byte[ 32768 ];
+
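+		// Randomly interleave single reads, bulk reads, available(), position() and skip(), comparing every result against a plain FileInputStream on the same file.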
+		while( true ) {
+
+			switch( random.nextInt( 6 ) ) {
+
+			case 0:
+				if ( DEBUG ) System.err.println("read()");
+				a1 = bis.read();
+				a2 = test.read();
+				assertEquals( a1, a2 );
+				if ( a1 == -1 ) return;
+				break;
+
+			case 1:
+				off = random.nextInt( b1.length );
+				len = random.nextInt( b1.length - off + 1 );
+				a1 = bis.read( b1, off, len );
+				a2 = test.read( b2, off, len );
+				if ( DEBUG ) System.err.println("read(b, " + off + ", " + len + ")");
+
+				assertEquals( a1, a2 );
+
+				for( int i = off; i < off + len; i++ ) assertEquals( "Position " + i, b1[ i ], b2[ i ] );
+				break;
+
+			case 2:
+				if ( DEBUG ) System.err.println("available()");
+				assertEquals( bis.available(), test.available() );
+				break;
+
+			case 3:
+				if ( DEBUG ) System.err.println("position()" );
+				pos = (int)bis.position();
+				assertEquals( (int)fc.position(), pos );
+				break;
+
+			case 4:
+				pos = random.nextInt( length );
+				bis.position( pos );
+				if ( DEBUG ) System.err.println("position(" + pos + ")" );
+				(test = new FileInputStream( temp )).skip( pos );
+				fc = test.getChannel();
+				break;
+
+			case 5:
+				pos = random.nextInt( (int)(length - bis.position() + 1) );
+				a1 = (int)bis.skip( pos );
+				a2 = (int)test.skip( pos );
+				if ( DEBUG ) System.err.println("skip(" + pos + ")" );
+				assertEquals( a1, a2 );
+				break;
+			}
+		}
+
+	}
+
+	@Test
+	public void testRandom() throws IOException {
+		testRandom( 1 );
+		testRandom( 2 );
+		testRandom( 3 );
+		testRandom( 100 );
+		testRandom( 2048 );
+	}
+}
+
diff --git a/test/it/unimi/dsi/fastutil/io/FastBufferedOutputStreamTest.java b/test/it/unimi/dsi/fastutil/io/FastBufferedOutputStreamTest.java
new file mode 100644
index 0000000..b8e9d07
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/io/FastBufferedOutputStreamTest.java
@@ -0,0 +1,90 @@
+package it.unimi.dsi.fastutil.io;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.channels.FileChannel;
+import java.util.Arrays;
+import java.util.Random;
+
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class FastBufferedOutputStreamTest {
+
+	private static final boolean DEBUG = false;
+
+	@Test
+	public void testWriteEqualToBufferSize() throws IOException {
+		final FastBufferedOutputStream fbos = new FastBufferedOutputStream( new ByteArrayOutputStream(), 4 );
+		fbos.write( 0 );
+		fbos.write( new byte[ 4 ] );
+		fbos.write( 0 );
+	}
+
+	public void testRandom( int bufSize ) throws FileNotFoundException, IOException {
+
+		File file = File.createTempFile( getClass().getSimpleName(), "test" );
+		file.deleteOnExit();
+		FastBufferedOutputStream fbos = new FastBufferedOutputStream( new FileOutputStream( file + "1" ), bufSize );
+		FileOutputStream bos = new FileOutputStream( file + "2" );
+		FileChannel fc = bos.getChannel();
+		Random r = new Random();
+		long pos, len;
+		
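+		// Randomly interleave single-byte writes, bulk writes, flush(), position() queries and repositioning; a reference copy written through a plain FileOutputStream is compared at the end.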
+		int j = r.nextInt( 10000 );
+		while( j-- != 0 ) {
+			switch( r.nextInt( 6 ) ) {
+
+			case 0:
+				int x = (byte)r.nextInt();
+				fbos.write( x );
+				bos.write( x );
+				break;
+
+			case 1:
+				byte[] b  = new byte[ r.nextInt( 32768 ) + 16 ];
+				for( int i = 0; i < b.length; i++ ) b[ i ] = (byte)r.nextInt();
+				int offset = r.nextInt( b.length / 4 );
+				int length = r.nextInt( b.length - offset );
+				fbos.write( b, offset, length );
+				bos.write( b, offset, length );
+				break;
+
+			case 2:
+				fbos.flush();
+				break;
+				
+			case 3:
+				if ( DEBUG ) System.err.println("position()" );
+				pos = (int)fbos.position();
+				assertEquals( (int)fc.position(), pos );
+				break;
+
+			case 4:
+				assertEquals( fc.size(), len = fbos.length() );
+				pos = len != 0 ? r.nextInt( (int)len ) : 0;
+				fbos.position( pos );
+				fc.position( pos );
+				if ( DEBUG ) System.err.println("position(" + pos + ")" );
+				break;
+			}
+		}
+
+		fbos.close();
+		bos.close();
+		assertTrue( Arrays.equals( BinIO.loadBytes( file + "1" ), BinIO.loadBytes( file + "2" ) ) );
+	}
+	
+	@Test
+	public void testRandom() throws FileNotFoundException, IOException {
+		testRandom( 1 );
+		testRandom( 2 );
+		testRandom( 3 );
+		testRandom( 1024 );
+	}
+}
+
diff --git a/test/it/unimi/dsi/fastutil/io/FastByteArrayOutputStreamTest.java b/test/it/unimi/dsi/fastutil/io/FastByteArrayOutputStreamTest.java
new file mode 100644
index 0000000..88d3c4d
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/io/FastByteArrayOutputStreamTest.java
@@ -0,0 +1,100 @@
+package it.unimi.dsi.fastutil.io;
+
+import java.io.IOException;
+
+import org.junit.Test;
+
+import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream;
+import static org.junit.Assert.*;
+
+public class FastByteArrayOutputStreamTest {
+
+	@SuppressWarnings("resource")
+	@Test
+	public void testWrite() {
+		FastByteArrayOutputStream fbaos = new FastByteArrayOutputStream();
+		fbaos.write( 1 );
+		fbaos.write( 2 );
+		assertEquals( 1, fbaos.array[ 0 ] );
+		assertEquals( 2, fbaos.array[ 1 ] );
+		assertEquals( 2, fbaos.length );
+		assertEquals( 2, fbaos.position() );
+		fbaos.position( 1 );
+		fbaos.write( 3 );
+		assertEquals( 2, fbaos.position() );
+		assertEquals( 2, fbaos.length );
+		assertEquals( 3, fbaos.array[ 1 ] );
+		fbaos.write( 4 );
+		assertEquals( 3, fbaos.length );
+		assertEquals( 4, fbaos.array[ 2 ] );
+
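+		// Write enough further bytes (17 in total) to force the backing array to grow.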
+		for( int i = 0; i < 14; i++ ) fbaos.write( i + 10 );
+		assertEquals( 17, fbaos.length );
+		for( int i = 0; i < 14; i++ ) assertEquals( i + 10, fbaos.array[ 3 + i ] );
+	}
+
+	@SuppressWarnings("resource")
+	@Test
+	public void testWriteArray() throws IOException {
+		FastByteArrayOutputStream fbaos = new FastByteArrayOutputStream();
+		fbaos.write( 1 );
+		fbaos.write( 2 );
+		fbaos.write( 3 );
+
+		byte[] a = new byte[ 14 ];
+		for( int i = 0; i < 14; i++ ) a[ i ] = (byte)( i + 10 );
+		fbaos.write( a );
+		assertEquals( 17, fbaos.length );
+		assertEquals( 1, fbaos.array[ 0 ] );
+		assertEquals( 2, fbaos.array[ 1 ] );
+		assertEquals( 3, fbaos.array[ 2 ] );
+		for( int i = 0; i < 14; i++ ) assertEquals( i + 10, fbaos.array[ 3 + i ] );
+
+		fbaos.write( a );
+		assertEquals( 31, fbaos.length );
+		for( int i = 0; i < 14; i++ ) assertEquals( i + 10, fbaos.array[ 17 + i ] );
+
+		fbaos = new FastByteArrayOutputStream();
+		fbaos.write( 1 );
+		fbaos.write( 2 );
+		fbaos.write( 3 );
+		fbaos.position( 2 );
+		
+		fbaos.write( a );
+		assertEquals( 16, fbaos.length );
+		assertEquals( 1, fbaos.array[ 0 ] );
+		assertEquals( 2, fbaos.array[ 1 ] );
+		for( int i = 0; i < 14; i++ ) assertEquals( i + 10, fbaos.array[ 2 + i ] );
+
+		fbaos = new FastByteArrayOutputStream();
+		fbaos.write( 1 );
+		fbaos.write( 2 );
+		fbaos.write( 3 );
+		fbaos.write( 4 );
+		fbaos.position( 3 );
+		
+		fbaos.write( a );
+		assertEquals( 17, fbaos.length );
+		assertEquals( 1, fbaos.array[ 0 ] );
+		assertEquals( 2, fbaos.array[ 1 ] );
+		assertEquals( 3, fbaos.array[ 2 ] );
+		for( int i = 0; i < 14; i++ ) assertEquals( i + 10, fbaos.array[ 3 + i ] );
+	}
+
+	@SuppressWarnings("resource")
+	@Test
+	public void testPositionWrite() {
+		FastByteArrayOutputStream fbaos = new FastByteArrayOutputStream();
+		fbaos.position( 1 );
+		fbaos.write( 1 );
+		assertEquals( 2, fbaos.length );
+	}
+
+	@SuppressWarnings("resource")
+	@Test
+	public void testPositionWrite2() {
+		FastByteArrayOutputStream fbaos = new FastByteArrayOutputStream();
+		fbaos.position( fbaos.array.length + 2 );
+		fbaos.write( 1 );
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/io/InspectableFileCachedInputStreamTest.java b/test/it/unimi/dsi/fastutil/io/InspectableFileCachedInputStreamTest.java
new file mode 100644
index 0000000..0e48b2a
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/io/InspectableFileCachedInputStreamTest.java
@@ -0,0 +1,216 @@
+package it.unimi.dsi.fastutil.io;
+
+/*		 
+ * fastutil: Fast & compact type-specific collections for Java
+ *
+ * Copyright (C) 2013 Sebastiano Vigna 
+ *
+ *  This library is free software; you can redistribute it and/or
+ *  modify it under the terms of the GNU Lesser General Public
+ *  License as published by the Free Software Foundation; either
+ *  version 2.1 of the License, or (at your option) any later version.
+ *
+ *  This library is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ *  Lesser General Public License for more details.
+ *
+ *  You should have received a copy of the GNU Lesser General Public
+ *  License along with this library; if not, write to the Free Software
+ *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+ *
+ */
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import it.unimi.dsi.fastutil.io.InspectableFileCachedInputStream;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Random;
+
+import org.junit.Test;
+
+public class InspectableFileCachedInputStreamTest {
+
+	private static final Random r = new Random( 0 );
+	
+	public static List<byte[]> byteArrays;
+	static {
+		byteArrays = new ArrayList<byte[]>();
+		byte[] b;
+		// Generate byte buffers from 1 byte up to 64 KiB, ordered so that their sizes are not monotonically increasing, plus an empty and a short ASCII buffer.
+		for ( int k = 0; k < 10; k++ ) {
+			b = new byte[ 1 << k ];
+			r.nextBytes( b );
+			byteArrays.add( b );
+		}
+		for ( int k = 16; k >= 10; k-- ) {
+			b = new byte[ 1 << k ];
+			r.nextBytes( b );
+			byteArrays.add( b );
+		}
+		byteArrays.add( new byte[] {} );
+		byteArrays.add( "This is a short\nnon empty and purely ASCII\nbyte sequence".getBytes() );
+	}
+	
+
+	@Test
+	public void testSmall() throws IOException {
+		InspectableFileCachedInputStream icis = new InspectableFileCachedInputStream( 4 );
+		assertTrue( icis.isOpen() );
+		byte[] data = new byte[] { 1, 2 };
+		icis.write( ByteBuffer.wrap( data ) );
+		
+		assertEquals( 2, icis.length() );
+		assertEquals( 1, icis.read() );
+		assertEquals( 2, icis.read() );
+		assertEquals( -1, icis.read() );
+		
+		icis.position( 0 );
+		byte b[] = new byte[ 2 ];
+		assertEquals( 2, icis.read( b ) );
+		assertArrayEquals( data, b );
+		assertEquals( -1, icis.read() );
+		assertEquals( -1, icis.read( b, 0, b.length ) );
+		assertEquals( 0, icis.read( b, 0, 0 ) );
+
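+		// clear() resets the stream for reuse; the next write (5 bytes) is larger than the 4-byte buffer, so part of the data should spill into the overflow file.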
+		icis.clear();
+		assertTrue( icis.isOpen() );
+		data = new byte[] { 1, 2, 3, 4, 5 };
+		icis.write( ByteBuffer.wrap( data ) );
+		
+		assertEquals( 5, icis.length() );
+		assertEquals( 1, icis.read() );
+		assertEquals( 2, icis.read() );
+		assertEquals( 3, icis.read() );
+		assertEquals( 4, icis.read() );
+		assertEquals( 5, icis.read() );
+		assertEquals( -1, icis.read() );
+		
+		icis.position( 0 );
+		assertEquals( 0, icis.position() );
+		b = new byte[ 5 ];
+		assertEquals( 5, icis.read( b ) );
+		assertArrayEquals( data, b );
+
+		icis.position( 2 );
+		b = new byte[ 4 ];
+		assertEquals( 3, icis.read( b ) );
+		assertArrayEquals( Arrays.copyOfRange( data, 2, 5 ), Arrays.copyOfRange( b, 0, 3 ) );
+
+
+		icis.position( 0 );
+		assertEquals( 1, icis.read() );
+
+		icis.position( 4 );
+		assertEquals( 1, icis.available() );
+		assertEquals( 5, icis.read() );
+		assertEquals( 5, icis.position() );
+
+		icis.position( 0 );
+		assertEquals( 2, icis.skip( 2 ) );
+		assertEquals( 2, icis.skip( 2 ) );
+		assertEquals( 5, icis.read() );
+		assertEquals( 5, icis.position() );
+
+		icis.position( 5 );
+		assertEquals( -1, icis.read() );
+		assertEquals( -1, icis.read( b, 0, b.length ) );
+
+		icis.close();
+		icis.dispose();
+	}
+
+	@Test
+	public void test() throws IOException {
+		for( int bufferSize: new int[] { 1, 2, 1024, 16384, 1024 * 1024 } ) {
+			InspectableFileCachedInputStream icis = new InspectableFileCachedInputStream( bufferSize );
+			for( byte[] a: byteArrays ) icis.write( ByteBuffer.wrap( a ) );
+			for( byte[] a: byteArrays ) {
+				final byte[] buffer = new byte[ a.length ];
+				icis.read( buffer );
+				assertArrayEquals( a, buffer );
+			}
+
+			icis.position( 0 );
+			icis.truncate( 0 );
+			
+			for( byte[] a: byteArrays )
+				for( byte b: a ) assertEquals( b, (byte)icis.read() );
+
+			icis.close();
+			icis.dispose();
+		}
+	}
+
+	@Test
+	public void testWithSpecifiedFile() throws IOException {
+		final InspectableFileCachedInputStream icis = new InspectableFileCachedInputStream( 4, File.createTempFile( getClass().getSimpleName(), "overflow" ) );
+		final byte[] data = new byte[] { 1, 2 };
+		icis.write( ByteBuffer.wrap( data ) );
+		
+		assertEquals( 2, icis.length() );
+		assertEquals( 1, icis.read() );
+		assertEquals( 2, icis.read() );
+		assertEquals( -1, icis.read() );
+		
+		icis.close();
+		icis.dispose();
+	}
+	
+	@Test(expected=IOException.class)
+	public void testClosed() throws IOException {
+		final InspectableFileCachedInputStream icis = new InspectableFileCachedInputStream( 4 );
+		final byte[] data = new byte[] { 1, 2 };
+		icis.write( ByteBuffer.wrap( data ) );
+		icis.close();
+		assertFalse( icis.isOpen() );
+		icis.read();
+	}
+
+	@Test(expected=IOException.class)
+	public void testDisposed() throws IOException {
+		@SuppressWarnings("resource")
+		final InspectableFileCachedInputStream icis = new InspectableFileCachedInputStream( 4 );
+		final byte[] data = new byte[] { 1, 2 };
+		icis.write( ByteBuffer.wrap( data ) );
+		icis.dispose();
+		assertFalse( icis.isOpen() );
+		icis.read();
+	}
+
+	@Test(expected=IOException.class)
+	public void testClearDisposed() throws IOException {
+		@SuppressWarnings("resource")
+		final InspectableFileCachedInputStream icis = new InspectableFileCachedInputStream();
+		final byte[] data = new byte[] { 1, 2 };
+		icis.write( ByteBuffer.wrap( data ) );
+		icis.dispose();
+		icis.clear();
+	}
+	
+	@Test(expected=IOException.class)
+	public void testResetDisposed() throws IOException {
+		@SuppressWarnings("resource")
+		final InspectableFileCachedInputStream icis = new InspectableFileCachedInputStream();
+		final byte[] data = new byte[] { 1, 2 };
+		icis.write( ByteBuffer.wrap( data ) );
+		icis.dispose();
+		icis.reset();
+	}
+	
+	@SuppressWarnings("resource")
+	@Test(expected=IllegalArgumentException.class)
+	public void testNegativeBuffer() throws IOException {
+		new InspectableFileCachedInputStream( -1 );
+	}
+
+}
\ No newline at end of file
diff --git a/test/it/unimi/dsi/fastutil/io/TestIOTest.java b/test/it/unimi/dsi/fastutil/io/TestIOTest.java
new file mode 100644
index 0000000..65f1f04
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/io/TestIOTest.java
@@ -0,0 +1,165 @@
+package it.unimi.dsi.fastutil.io;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import it.unimi.dsi.fastutil.bytes.ByteBigArrays;
+import it.unimi.dsi.fastutil.doubles.DoubleIterator;
+
+import java.io.BufferedReader;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.PrintStream;
+
+import org.junit.Test;
+
+public class TestIOTest {
+
+	final static byte[] SMALL = new byte[ 1024 ];
+	final static byte[] LARGE = new byte[ 1024 * 1024 + 42 ];
+	
+	static {
+		for( int i = SMALL.length; i-- != 0; ) SMALL[ i ] = (byte)i;
+		for( int i = LARGE.length; i-- != 0; ) LARGE[ i ] = (byte)i;
+	}
+	
+	public void testBytes( byte[] a ) throws IOException {
+		final File file = File.createTempFile( getClass().getSimpleName(), "dump" );
+		file.deleteOnExit();
+		final byte[] aShifted = new byte[ a.length + 1 ];
+		System.arraycopy( a, 0, aShifted, 1, a.length );
+		
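+		// Exercise the four TextIO.storeBytes variants (whole array and shifted fragment, to a File and to a PrintStream), then reload through each loadBytes overload.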
+		for( int i = 0; i < 4; i++ ) {
+			file.delete();
+			switch(i) {
+			case 0: TextIO.storeBytes( a, file ); break;
+			case 1: TextIO.storeBytes( a, new PrintStream( file ) ); break;
+			case 2: TextIO.storeBytes( aShifted, 1, a.length, file ); break;
+			case 3: TextIO.storeBytes( aShifted, 1, a.length, new PrintStream( file ) ); break;
+			}
+			byte[] b = new byte[ a.length ];
+			assertEquals( a.length, TextIO.loadBytes( file, b ) );
+			assertArrayEquals( a, b );
+
+			TextIO.loadBytes( file, b, 0, a.length );
+			assertArrayEquals( a, b );
+
+			assertEquals( a.length, TextIO.loadBytes( new BufferedReader( new FileReader( file ) ), b ) );
+			assertArrayEquals( a, b );
+			assertEquals( a.length, TextIO.loadBytes( new BufferedReader( new FileReader( file ) ), b, 0, a.length ) );
+			assertArrayEquals( a, b );
+
+			byte[] c = new byte[ a.length + 1 ];
+			assertEquals( a.length, TextIO.loadBytes( new BufferedReader( new FileReader( file ) ), c ) );
+			assertEquals( 0, c[ a.length ] );
+			System.arraycopy( c, 0, b, 0, b.length );
+			assertArrayEquals( a, b );
+			assertEquals( a.length, TextIO.loadBytes( new BufferedReader( new FileReader( file ) ), c, 1, a.length ) );
+			assertEquals( 0, c[ 0 ] );
+			System.arraycopy( c, 1, b, 0, b.length );
+			assertArrayEquals( a, b );
+		}
+
+	}
+
+	@Test
+	public void testBytes() throws IOException {
+		testBytes( SMALL );
+		testBytes( LARGE );
+	}
+
+	public void testBigBytes( byte[][] a ) throws IOException {
+		final File file = File.createTempFile( getClass().getSimpleName(), "dump" );
+		file.deleteOnExit();
+		final long length = ByteBigArrays.length( a );
+		final byte[][] aShifted = ByteBigArrays.newBigArray( length + 1 );
+		ByteBigArrays.copy( a, 0, aShifted, 1, length );
+		
+		for( int i = 0; i < 4; i++ ) {
+			file.delete();
+			switch(i) {
+			case 0: TextIO.storeBytes( a, file ); break;
+			case 1: TextIO.storeBytes( a, new PrintStream( file ) ); break;
+			case 2: TextIO.storeBytes( aShifted, 1, length, file ); break;
+			case 3: TextIO.storeBytes( aShifted, 1, length, new PrintStream( file ) ); break;
+			}
+
+			byte[][] b = ByteBigArrays.newBigArray( length );
+			assertEquals( length, TextIO.loadBytes( file, b ) );
+			assertArrayEquals( a, b );
+			assertEquals( length, TextIO.loadBytes( file, b, 0, length ) );
+			assertArrayEquals( a, b );
+
+			assertEquals( length, TextIO.loadBytes( new BufferedReader( new FileReader( file ) ), b ) );
+			assertArrayEquals( a, b );
+			assertEquals( length, TextIO.loadBytes( new BufferedReader( new FileReader( file ) ), b, 0, length ) );
+			assertArrayEquals( a, b );
+
+			byte[][] c = ByteBigArrays.newBigArray( length + 1 );
+			assertEquals( length, TextIO.loadBytes( new BufferedReader( new FileReader( file ) ), c ) );
+			assertEquals( 0, ByteBigArrays.get( c, length ) );
+			ByteBigArrays.copy( c, 0, b, 0, length );
+			assertArrayEquals( a, b );
+			assertEquals( length, TextIO.loadBytes( new BufferedReader( new FileReader( file ) ), c, 1, length ) );
+			assertEquals( 0, ByteBigArrays.get( c, 0 ) );
+			ByteBigArrays.copy( c, 1, b, 0, length );
+			assertArrayEquals( a, b );
+		}
+
+	}
+
+	@Test
+	public void testBigBytes() throws IOException {
+		testBigBytes( ByteBigArrays.wrap( SMALL ) );
+		testBigBytes( ByteBigArrays.wrap( LARGE ) );
+	}
+	
+	public void testFileDataWrappers() throws IOException {
+		final File file = File.createTempFile( getClass().getSimpleName(), "dump" );
+		file.deleteOnExit();
+		final DataOutputStream dos = new DataOutputStream( new FileOutputStream( file ) );
+		for( int i = 0; i < 100; i++ ) dos.writeDouble( i );
+		dos.close();
+		
+		DoubleIterator di = TextIO.asDoubleIterator( file );
+		for( int i = 0; i < 100; i++ ) assertEquals( i, di.nextDouble(), 0. );
+		assertFalse( di.hasNext() );
+
+		di = TextIO.asDoubleIterator( file );
+		for( int i = 0; i < 100; i++ ) {
+			assertTrue( di.hasNext() );
+			assertEquals( i, di.nextDouble(), 0. );
+		}
+		
+		di = TextIO.asDoubleIterator( file );
+		int s = 1;
+		for( int i = 0; i < 100; i++ ) {
+			assertEquals( Math.min( s, 100 - i ), di.skip( s ) );
+			i += s;
+			if ( i >= 100 ) break;
+			assertEquals( i, di.nextDouble(), 0. );
+			s *= 2;
+		}
+
+		di = TextIO.asDoubleIterator( file );
+		s = 1;
+		for( int i = 0; i < 100; i++ ) {
+			if ( s > 100 - i ) break;
+			assertTrue( di.hasNext() );
+			assertEquals( Math.min( s, 100 - i ), di.skip( s ) );
+			i += s;
+			if ( i >= 100 ) {
+				assertFalse( di.hasNext() );
+				break;
+			}
+			assertTrue( di.hasNext() );
+			assertTrue( di.hasNext() ); // To increase coverage
+			assertEquals( i, di.nextDouble(), 0. );
+			s *= 2;
+		}
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/longs/LongArrayFrontCodedListTest.java b/test/it/unimi/dsi/fastutil/longs/LongArrayFrontCodedListTest.java
new file mode 100644
index 0000000..e46ea25
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/longs/LongArrayFrontCodedListTest.java
@@ -0,0 +1,125 @@
+package it.unimi.dsi.fastutil.longs;
+
+import it.unimi.dsi.fastutil.objects.ObjectListIterator;
+
+import java.io.IOException;
+
+import static org.junit.Assert.*;
+import org.junit.Test;
+
+@SuppressWarnings({ "rawtypes", "unchecked" })
+public class LongArrayFrontCodedListTest {
+
+
+	private static java.util.Random r = new java.util.Random( 0 );
+
+	private static long genKey() {
+		return r.nextLong();
+	}
+
+	private static boolean contentEquals( java.util.List x, java.util.List y ) {
+		if ( x.size() != y.size() ) return false;
+		for ( int i = 0; i < x.size(); i++ )
+			if ( !java.util.Arrays.equals( (long[])x.get( i ), (long[])y.get( i ) ) ) return false;
+		return true;
+	}
+
+	private static int l[];
+
+	private static long[][] a;
+
+	private static void test( int n ) throws IOException, ClassNotFoundException {
+		l = new int[ n ];
+		a = new long[ n ][];
+		for ( int i = 0; i < n; i++ )
+			l[ i ] = (int)( Math.abs( r.nextGaussian() ) * 32 );
+		for ( int i = 0; i < n; i++ )
+			a[ i ] = new long[ l[ i ] ];
+		for ( int i = 0; i < n; i++ )
+			for ( int j = 0; j < l[ i ]; j++ )
+				a[ i ][ j ] = genKey();
+		LongArrayFrontCodedList m = new LongArrayFrontCodedList( it.unimi.dsi.fastutil.objects.ObjectIterators.wrap( a ), r.nextInt( 4 ) + 1 );
+		it.unimi.dsi.fastutil.objects.ObjectArrayList t = new it.unimi.dsi.fastutil.objects.ObjectArrayList( a );
+		// System.out.println(m);
+		// for( i = 0; i < t.size(); i++ )
+		// System.out.println(ARRAY_LIST.wrap((KEY_TYPE[])t.get(i)));
+		/* Now we check that m actually holds that data. */
+		assertTrue( "Error: m does not equal t at creation", contentEquals( m, t ) );
+		/* Now we check cloning. */
+		assertTrue( "Error: m does not equal m.clone()", contentEquals( m, m.clone() ) );
+		/* Now we play with iterators. */
+		{
+			ObjectListIterator i;
+			java.util.ListIterator j;
+			i = m.listIterator();
+			j = t.listIterator();
+			for ( int k = 0; k < 2 * n; k++ ) {
+				assertTrue( "Error: divergence in hasNext()", i.hasNext() == j.hasNext() );
+				assertTrue( "Error: divergence in hasPrevious()", i.hasPrevious() == j.hasPrevious() );
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					assertTrue( "Error: divergence in next()", java.util.Arrays.equals( (long[])i.next(), (long[])j.next() ) );
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					assertTrue( "Error: divergence in previous()", java.util.Arrays.equals( (long[])i.previous(), (long[])j.previous() ) );
+				}
+				assertTrue( "Error: divergence in nextIndex()", i.nextIndex() == j.nextIndex() );
+				assertTrue( "Error: divergence in previousIndex()", i.previousIndex() == j.previousIndex() );
+			}
+		}
+		{
+			int from = r.nextInt( m.size() + 1 );
+			ObjectListIterator i;
+			java.util.ListIterator j;
+			i = m.listIterator( from );
+			j = t.listIterator( from );
+			for ( int k = 0; k < 2 * n; k++ ) {
+				assertTrue( "Error: divergence in hasNext() (iterator with starting point " + from + ")", i.hasNext() == j.hasNext() );
+				assertTrue( "Error: divergence in hasPrevious() (iterator with starting point " + from + ")", i.hasPrevious() == j.hasPrevious() );
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					assertTrue( "Error: divergence in next() (iterator with starting point " + from + ")", java.util.Arrays.equals( (long[])i.next(), (long[])j.next() ) );
+					// System.err.println("Done next " + I + " " + J + "  " + badPrevious);
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					assertTrue( "Error: divergence in previous() (iterator with starting point " + from + ")", java.util.Arrays.equals( (long[])i.previous(), (long[])j.previous() ) );
+				}
+			}
+		}
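+		/* Now we check that serialization and deserialization preserve the content. */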
+		java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" );
+		java.io.OutputStream os = new java.io.FileOutputStream( ff );
+		java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os );
+		oos.writeObject( m );
+		oos.close();
+		java.io.InputStream is = new java.io.FileInputStream( ff );
+		java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is );
+		m = (LongArrayFrontCodedList)ois.readObject();
+		ois.close();
+		ff.delete();
+		assertTrue( "Error: m does not equal t after save/read", contentEquals( m, t ) );
+		return;
+	}
+
+	@Test
+	public void test1() throws IOException, ClassNotFoundException {
+		test( 1 );
+	}
+
+	@Test
+	public void test10() throws IOException, ClassNotFoundException {
+		test( 10 );
+	}
+
+	@Test
+	public void test100() throws IOException, ClassNotFoundException {
+		test( 100 );
+	}
+
+	@Test
+	public void test1000() throws IOException, ClassNotFoundException {
+		test( 1000 );
+	}
+
+	@Test
+	public void test10000() throws IOException, ClassNotFoundException {
+		test( 10000 );
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/longs/LongArraysTest.java b/test/it/unimi/dsi/fastutil/longs/LongArraysTest.java
new file mode 100644
index 0000000..a4023cc
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/longs/LongArraysTest.java
@@ -0,0 +1,127 @@
+package it.unimi.dsi.fastutil.longs;
+
+import static org.junit.Assert.assertTrue;
+
+import java.util.Random;
+
+import org.junit.Test;
+
+public class LongArraysTest {
+	
+	private static long[] identity( int n ) {
+		final long[] a = new long[ n ];
+		while( n-- != 0 ) a[ n ] = n;
+		return a;
+	}
+
+	@Test
+	public void testRadixSort1() {
+		long[] t = { 2, 1, 0, 4 };
+		LongArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+		
+		t = new long[] { 2, -1, 0, -4 };
+		LongArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+		
+		t = LongArrays.shuffle( identity( 100 ), new Random( 0 ) );
+		LongArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new long[ 100 ];
+		Random random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextLong();
+		LongArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new long[ 100000 ];
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextLong();
+		LongArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new long[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = random.nextLong();
+		LongArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+	}
+
+	@Test
+	public void testRadixSort2() {
+		long[][] d = new long[ 2 ][];
+
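+		// radixSort( k, v ) should sort the pairs ( k[ i ], v[ i ] ) lexicographically; the assertions below check exactly that ordering.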
+		d[ 0 ] = new long[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3;
+		d[ 1 ] = LongArrays.shuffle( identity( 10 ), new Random( 0 ) );
+		LongArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+		
+		d[ 0 ] = new long[ 100000 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100;
+		d[ 1 ] = LongArrays.shuffle( identity( 100000 ), new Random( 6 ) );
+		LongArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new long[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = i % 3 - 2;
+		Random random = new Random( 0 );
+		d[ 1 ] = new long[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextLong();
+		LongArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+		
+		d[ 0 ] = new long[ 100000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextLong();
+		d[ 1 ] = new long[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextLong();
+		LongArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+	}
+
+	@Test
+	public void testRadixSort() {
+		long[][] t = { { 2, 1, 0, 4 } };
+		LongArrays.radixSort( t );
+		for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] );
+		
+		t[ 0 ] = LongArrays.shuffle( identity( 100 ), new Random( 0 ) );
+		LongArrays.radixSort( t );
+		for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] );
+
+		long[][] d = new long[ 2 ][];
+
+		d[ 0 ] = new long[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 3 - i % 3;
+		d[ 1 ] = LongArrays.shuffle( identity( 10 ), new Random( 0 ) );
+		LongArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		
+		d[ 0 ] = new long[ 100000 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = 100 - i % 100;
+		d[ 1 ] = LongArrays.shuffle( identity( 100000 ), new Random( 6 ) );
+		LongArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new long[ 10 ];
+		Random random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextLong();
+		d[ 1 ] = new long[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextLong();
+		LongArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		
+		d[ 0 ] = new long[ 100000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = random.nextLong();
+		d[ 1 ] = new long[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = random.nextLong();
+		LongArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Long.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/objects/AbstractObject2IntFunctionTest.java b/test/it/unimi/dsi/fastutil/objects/AbstractObject2IntFunctionTest.java
new file mode 100644
index 0000000..67950da
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/objects/AbstractObject2IntFunctionTest.java
@@ -0,0 +1,16 @@
+package it.unimi.dsi.fastutil.objects;
+
+import static org.junit.Assert.*;
+
+import org.junit.Test;
+
+public class AbstractObject2IntFunctionTest {
+	@Test
+	public void testRemove() {
+		final Object2IntArrayMap<Object> a = new Object2IntArrayMap<Object>();
+		final Object key = new Object();
+		a.put( key, 1 );
+		assertEquals( Integer.valueOf( 1 ), a.remove( key ) );
+	}
+}
+
diff --git a/test/it/unimi/dsi/fastutil/objects/Object2IntOpenHashMapTest.java b/test/it/unimi/dsi/fastutil/objects/Object2IntOpenHashMapTest.java
new file mode 100644
index 0000000..c995771
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/objects/Object2IntOpenHashMapTest.java
@@ -0,0 +1,201 @@
+package it.unimi.dsi.fastutil.objects;
+
+import it.unimi.dsi.fastutil.Hash;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.junit.Ignore;
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+@SuppressWarnings("rawtypes")
+public class Object2IntOpenHashMapTest {
+
+	private static java.util.Random r = new java.util.Random( 0 );
+
+	private static Object genKey() {
+		return Integer.toBinaryString( r.nextInt() );
+	}
+
+	private static int genValue() {
+		return r.nextInt();
+	}
+
+	private static boolean valEquals( Object o1, Object o2 ) {
+		return o1 == null ? o2 == null : o1.equals( o2 );
+	}
+
+	@SuppressWarnings("unchecked")
+	protected static void test( int n, float f ) throws IOException, ClassNotFoundException {
+		Object2IntOpenHashMap m = new Object2IntOpenHashMap( Hash.DEFAULT_INITIAL_SIZE, f );
+		Map t = new java.util.HashMap();
+		/* First of all, we fill t with random data. */
+		for ( int i = 0; i < n; i++ )
+			t.put( ( genKey() ), ( Integer.valueOf( genValue() ) ) );
+		/* Now we add to m the same data */
+		m.putAll( t );
+		assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) );
+		/* Now we check that m actually holds that data. */
+		for ( java.util.Iterator i = t.entrySet().iterator(); i.hasNext(); ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			assertTrue( "Error: m and t differ on an entry (" + e + ") after insertion (iterating on t)", valEquals( e.getValue(), m.get( e.getKey() ) ) );
+		}
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for ( java.util.Iterator i = m.entrySet().iterator(); i.hasNext(); ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			assertTrue( "Error: m and t differ on an entry (" + e + ") after insertion (iterating on m)", valEquals( e.getValue(), t.get( e.getKey() ) ) );
+		}
+		/* Now we check that m actually holds the same keys. */
+		for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a key (" + o + ") after insertion (iterating on t)", m.containsKey( o ) );
+			assertTrue( "Error: m and t differ on a key (" + o + ", in keySet()) after insertion (iterating on t)", m.keySet().contains( o ) );
+		}
+		/* Now we check that m actually holds the same keys, but iterating on m. */
+		for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a key after insertion (iterating on m)", t.containsKey( o ) );
+			assertTrue( "Error: m and t differ on a key (in keySet()) after insertion (iterating on m)", t.keySet().contains( o ) );
+		}
+		/* Now we check that m actually holds the same values. */
+		for ( java.util.Iterator i = t.values().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a value after insertion (iterating on t)", m.containsValue( o ) );
+			assertTrue( "Error: m and t differ on a value (in values()) after insertion (iterating on t)", m.values().contains( o ) );
+		}
+		/* Now we check that m actually holds the same values, but iterating on m. */
+		for ( java.util.Iterator i = m.values().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a value after insertion (iterating on m)", t.containsValue( o ) );
+			assertTrue( "Error: m and t differ on a value (in values()) after insertion (iterating on m)", t.values().contains( o ) );
+		}
+		/*
+		 * Now we check that inquiries about random data give the same answer in m and t. For m we
+		 * use the polymorphic method.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			Object T = genKey();
+			assertFalse( "Error: divergence in keys between t and m (polymorphic method)", m.containsKey( ( T ) ) != t.containsKey( ( T ) ) );
+			assertFalse( "Error: divergence between t and m (polymorphic method)", ( m.getInt( T ) != ( 0 ) ) != ( ( t.get( ( T ) ) == null ? ( 0 ) : ( ( ( (Integer)( t.get( ( T ) ) ) ).intValue() ) ) ) != ( 0 ) ) ||
+					t.get( ( T ) ) != null &&
+					!( Integer.valueOf( m.getInt( T ) ) ).equals( t.get( ( T ) ) ) );
+		}
+		/*
+		 * Again, we check that inquiries about random data give the same answer in m and t, but for
+		 * m we use the standard method.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			Object T = genKey();
+			assertTrue( "Error: divergence between t and m (standard method)", valEquals( m.get( ( T ) ), t.get( ( T ) ) ) );
+		}
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+		for ( int i = 0; i < 20 * n; i++ ) {
+			Object T = genKey();
+			int U = genValue();
+			assertTrue( "Error: divergence in put() between t and m", valEquals( m.put( ( T ), ( Integer.valueOf( U ) ) ), t.put( ( T ), ( Integer.valueOf( U ) ) ) ) );
+			T = genKey();
+			assertTrue( "Error: divergence in remove() between t and m", valEquals( m.remove( ( T ) ), t.remove( ( T ) ) ) );
+		}
+		assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) );
+		/* Now we check that m actually holds the same data. */
+		for ( java.util.Iterator i = t.entrySet().iterator(); i.hasNext(); ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			assertTrue( "Error: m and t differ on an entry (" + e + ") after removal (iterating on t)", valEquals( e.getValue(), m.get( e.getKey() ) ) );
+		}
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for ( java.util.Iterator i = m.entrySet().iterator(); i.hasNext(); ) {
+			java.util.Map.Entry e = (java.util.Map.Entry)i.next();
+			assertTrue( "Error: m and t differ on an entry (" + e + ") after removal (iterating on m)", valEquals( e.getValue(), t.get( e.getKey() ) ) );
+		}
+		/* Now we check that m actually holds the same keys. */
+		for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a key (" + o + ") after removal (iterating on t)", m.containsKey( o ) );
+			assertTrue( "Error: m and t differ on a key (" + o + ", in keySet()) after removal (iterating on t)", m.keySet().contains( o ) );
+		}
+		/* Now we check that m actually holds the same keys, but iterating on m. */
+		for ( java.util.Iterator i = m.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a key after removal (iterating on m)", t.containsKey( o ) );
+			assertTrue( "Error: m and t differ on a key (in keySet()) after removal (iterating on m)", t.keySet().contains( o ) );
+		}
+		/* Now we check that m actually holds the same values. */
+		for ( java.util.Iterator i = t.values().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a value after removal (iterating on t)", m.containsValue( o ) );
+			assertTrue( "Error: m and t differ on a value (in values()) after removal (iterating on t)", m.values().contains( o ) );
+		}
+		/* Now we check that m actually holds the same values, but iterating on m. */
+		for ( java.util.Iterator i = m.values().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on a value after removal (iterating on m)", t.containsValue( o ) );
+			assertTrue( "Error: m and t differ on a value (in values()) after removal (iterating on m)", t.values().contains( o ) );
+		}
+		int h = m.hashCode();
+		/* Now we save and read m. */
+		java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" );
+		java.io.OutputStream os = new java.io.FileOutputStream( ff );
+		java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os );
+		oos.writeObject( m );
+		oos.close();
+		java.io.InputStream is = new java.io.FileInputStream( ff );
+		java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is );
+		m = (Object2IntOpenHashMap)ois.readObject();
+		ois.close();
+		ff.delete();
+		assertTrue( "Error: hashCode() changed after save/read", m.hashCode() == h );
+		/* Now we check that m actually holds that data. */
+		for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); ) {
+			Object o = i.next();
+			assertTrue( "Error: m and t differ on an entry after save/read", valEquals( m.get( o ), t.get( o ) ) );
+		}
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+		for ( int i = 0; i < 20 * n; i++ ) {
+			Object T = genKey();
+			int U = genValue();
+			assertTrue( "Error: divergence in put() between t and m after save/read", valEquals( m.put( ( T ), ( Integer.valueOf( U ) ) ), t.put( ( T ), ( Integer.valueOf( U ) ) ) ) );
+			T = genKey();
+			assertTrue( "Error: divergence in remove() between t and m after save/read", valEquals( m.remove( ( T ) ), t.remove( ( T ) ) ) );
+		}
+		assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) );
+		/* Now we take out of m everything, and check that it is empty. */
+		for ( java.util.Iterator i = t.keySet().iterator(); i.hasNext(); )
+			m.remove( i.next() );
+		assertTrue( "Error: m is not empty (as it should be)", m.isEmpty() );
+		return;
+	}
+
+	@Test
+	public void test1() throws IOException, ClassNotFoundException {
+		test( 1, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1, Hash.FAST_LOAD_FACTOR );
+		test( 1, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test10() throws IOException, ClassNotFoundException {
+		test( 10, Hash.DEFAULT_LOAD_FACTOR );
+		test( 10, Hash.FAST_LOAD_FACTOR );
+		test( 10, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test100() throws IOException, ClassNotFoundException {
+		test( 100, Hash.DEFAULT_LOAD_FACTOR );
+		test( 100, Hash.FAST_LOAD_FACTOR );
+		test( 100, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Ignore("Too long")
+	@Test
+	public void test1000() throws IOException, ClassNotFoundException {
+		test( 1000, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1000, Hash.FAST_LOAD_FACTOR );
+		test( 1000, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+}
+
diff --git a/test/it/unimi/dsi/fastutil/objects/ObjectBigArrayBigListTest.java b/test/it/unimi/dsi/fastutil/objects/ObjectBigArrayBigListTest.java
new file mode 100644
index 0000000..0cc841b
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/objects/ObjectBigArrayBigListTest.java
@@ -0,0 +1,512 @@
+package it.unimi.dsi.fastutil.objects;
+
+import static org.junit.Assert.assertTrue;
+
+import java.util.Iterator;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+@SuppressWarnings("rawtypes")
+public class ObjectBigArrayBigListTest {
+
+	private static java.util.Random r = new java.util.Random( 0 );
+
+	private static int genKey() {
+		return r.nextInt();
+	}
+
+	private static Object[] k, nk;
+
+	private static Object kt[];
+
+	private static Object nkt[];
+
+	@SuppressWarnings({ "unchecked", "boxing" })
+	protected static void testLists( ObjectBigList m, ObjectBigList t, int n, int level ) {
+		Exception mThrowsOutOfBounds, tThrowsOutOfBounds;
+		Object rt = null;
+		Object rm = ( null );
+		if ( level > 4 ) return;
+		/* Now we check that both sets agree on random keys. For m we use the polymorphic method. */
+		for ( int i = 0; i < n; i++ ) {
+			int p = r.nextInt() % ( n * 2 );
+			Object T = genKey();
+			mThrowsOutOfBounds = tThrowsOutOfBounds = null;
+			try {
+				m.set( p, T );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				t.set( p, ( T ) );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			assertTrue( "Error (" + level + "): set() divergence at start in IndexOutOfBoundsException for index " + p + "  (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
+					( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			if ( mThrowsOutOfBounds == null ) assertTrue( "Error (" + level + "): m and t differ after set() on position " + p + " (" + m.get( p ) + ", " + t.get( p ) + ")",
+					t.get( p ).equals( ( m.get( p ) ) ) );
+			p = r.nextInt() % ( n * 2 );
+			mThrowsOutOfBounds = tThrowsOutOfBounds = null;
+			try {
+				m.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				t.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			assertTrue( "Error (" + level + "): get() divergence at start in IndexOutOfBoundsException for index " + p + "  (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
+					( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			if ( mThrowsOutOfBounds == null ) assertTrue( "Error (" + level + "): m and t differ after get() on position " + p + " (" + m.get( p ) + ", " + t.get( p ) + ")",
+					t.get( p ).equals( ( m.get( p ) ) ) );
+		}
+		/* Now we check that both sets agree on random keys. For m we use the standard method. */
+		for ( int i = 0; i < n; i++ ) {
+			int p = r.nextInt() % ( n * 2 );
+			mThrowsOutOfBounds = tThrowsOutOfBounds = null;
+			try {
+				m.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				t.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			assertTrue( "Error (" + level + "): get() divergence at start in IndexOutOfBoundsException for index " + p + "  (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
+					( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			if ( mThrowsOutOfBounds == null ) assertTrue( "Error (" + level + "): m and t differ at start on position " + p + " (" + m.get( p ) + ", " + t.get( p ) + ")", t.get( p )
+					.equals( m.get( p ) ) );
+		}
+		/* Now we check that m and t are equal. */
+		if ( !m.equals( t ) || !t.equals( m ) ) System.err.println( "m: " + m + " t: " + t );
+		assertTrue( "Error (" + level + "): ! m.equals( t ) at start", m.equals( t ) );
+		assertTrue( "Error (" + level + "): ! t.equals( m ) at start", t.equals( m ) );
+		/* Now we check that m actually holds that data. */
+		for ( Iterator i = t.iterator(); i.hasNext(); ) {
+			assertTrue( "Error (" + level + "): m and t differ on an entry after insertion (iterating on t)", m.contains( i.next() ) );
+		}
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for ( Iterator i = m.listIterator(); i.hasNext(); ) {
+			assertTrue( "Error (" + level + "): m and t differ on an entry after insertion (iterating on m)", t.contains( i.next() ) );
+		}
+		/*
+		 * Now we check that inquiries about random data give the same answer in m and t. For m we
+		 * use the polymorphic method.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			Object T = genKey();
+			assertTrue( "Error (" + level + "): divergence in content between t and m (polymorphic method)", m.contains( T ) == t.contains( ( T ) ) );
+		}
+		/*
+		 * Again, we check that inquiries about random data give the same answer in m and t, but for
+		 * m we use the standard method.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			Object T = genKey();
+			assertTrue( "Error (" + level + "): divergence in content between t and m (polymorphic method)", m.contains( ( T ) ) == t.contains( ( T ) ) );
+		}
+		/* Now we add and remove random data in m and t, checking that the result is the same. */
+		for ( int i = 0; i < 2 * n; i++ ) {
+			Object T = genKey();
+			try {
+				m.add( T );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				t.add( ( T ) );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			T = genKey();
+			int p = r.nextInt() % ( 2 * n + 1 );
+			mThrowsOutOfBounds = tThrowsOutOfBounds = null;
+			try {
+				m.add( p, T );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				t.add( p, ( T ) );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			assertTrue( "Error (" + level + "): add() divergence in IndexOutOfBoundsException for index " + p + " for " + T + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
+					( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			p = r.nextInt() % ( 2 * n + 1 );
+			mThrowsOutOfBounds = tThrowsOutOfBounds = null;
+			try {
+				rm = m.remove( p );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				rt = t.remove( p );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			assertTrue( "Error (" + level + "): remove() divergence in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
+					( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			if ( mThrowsOutOfBounds == null ) assertTrue( "Error (" + level + "): divergence in remove() between t and m (" + rt + ", " + rm + ")", rt.equals( ( rm ) ) );
+		}
+		assertTrue( "Error (" + level + "): ! m.equals( t ) after add/remove", m.equals( t ) );
+		assertTrue( "Error (" + level + "): ! t.equals( m ) after add/remove", t.equals( m ) );
+		/*
+		 * Now we add random data in m and t using addAll on a collection, checking that the result
+		 * is the same.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			int p = r.nextInt() % ( 2 * n + 1 );
+			java.util.Collection m1 = new java.util.ArrayList();
+			int s = r.nextInt( n / 2 + 1 );
+			for ( int j = 0; j < s; j++ )
+				m1.add( ( genKey() ) );
+			mThrowsOutOfBounds = tThrowsOutOfBounds = null;
+			try {
+				m.addAll( p, m1 );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				t.addAll( p, m1 );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			assertTrue( "Error (" + level + "): addAll() divergence in IndexOutOfBoundsException for index " + p + " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
+					( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			assertTrue( "Error (" + level + m + t + "): ! m.equals( t ) after addAll", m.equals( t ) );
+			assertTrue( "Error (" + level + m + t + "): ! t.equals( m ) after addAll", t.equals( m ) );
+		}
+		if ( m.size64() > n ) {
+			m.size( n );
+			while ( t.size() != n )
+				t.remove( t.size() - 1 );
+		}
+		/*
+		 * Now we add random data in m and t using addAll on a type-specific collection, checking
+		 * that the result is the same.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			int p = r.nextInt() % ( 2 * n + 1 );
+			ObjectCollection m1 = new ObjectBigArrayBigList();
+			java.util.Collection t1 = new java.util.ArrayList();
+			int s = r.nextInt( n / 2 + 1 );
+			for ( int j = 0; j < s; j++ ) {
+				Object x = genKey();
+				m1.add( x );
+				t1.add( ( x ) );
+			}
+			mThrowsOutOfBounds = tThrowsOutOfBounds = null;
+			try {
+				m.addAll( p, m1 );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				t.addAll( p, t1 );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			assertTrue( "Error (" + level + "): polymorphic addAll() divergence in IndexOutOfBoundsException for index " + p + " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds
+					+ ")", ( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			assertTrue( "Error (" + level + m + t + "): ! m.equals( t ) after polymorphic addAll", m.equals( t ) );
+			assertTrue( "Error (" + level + m + t + "): ! t.equals( m ) after polymorphic addAll", t.equals( m ) );
+		}
+		if ( m.size64() > n ) {
+			m.size( n );
+			while ( t.size() != n )
+				t.remove( t.size() - 1 );
+		}
+		/*
+		 * Now we add random data in m and t using addAll on a list, checking that the result is the
+		 * same.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			int p = r.nextInt() % ( 2 * n + 1 );
+			ObjectBigList m1 = new ObjectBigArrayBigList();
+			java.util.Collection t1 = new java.util.ArrayList();
+			int s = r.nextInt( n / 2 + 1 );
+			for ( int j = 0; j < s; j++ ) {
+				Object x = genKey();
+				m1.add( x );
+				t1.add( ( x ) );
+			}
+			mThrowsOutOfBounds = tThrowsOutOfBounds = null;
+			try {
+				m.addAll( p, m1 );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				t.addAll( p, t1 );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			assertTrue( "Error (" + level + "): list addAll() divergence in IndexOutOfBoundsException for index " + p + " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
+					( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			assertTrue( "Error (" + level + "): ! m.equals( t ) after list addAll", m.equals( t ) );
+			assertTrue( "Error (" + level + "): ! t.equals( m ) after list addAll", t.equals( m ) );
+		}
+		/* Now we check that both sets agree on random keys. For m we use the standard method. */
+		for ( int i = 0; i < n; i++ ) {
+			int p = r.nextInt() % ( n * 2 );
+			mThrowsOutOfBounds = tThrowsOutOfBounds = null;
+			try {
+				m.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				mThrowsOutOfBounds = e;
+			}
+			try {
+				t.get( p );
+			}
+			catch ( IndexOutOfBoundsException e ) {
+				tThrowsOutOfBounds = e;
+			}
+			assertTrue( "Error (" + level + "): get() divergence in IndexOutOfBoundsException for index " + p + "  (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
+					( mThrowsOutOfBounds == null ) == ( tThrowsOutOfBounds == null ) );
+			if ( mThrowsOutOfBounds == null ) assertTrue( "Error (" + level + "): m and t differ on position " + p + " (" + m.get( p ) + ", " + t.get( p ) + ")", t.get( p ).equals( m.get( p ) ) );
+		}
+		/* Now we inquiry about the content with indexOf()/lastIndexOf(). */
+		for ( int i = 0; i < 10 * n; i++ ) {
+			Object T = genKey();
+			assertTrue( "Error (" + level + "): indexOf() divergence for " + T + "  (" + m.indexOf( ( T ) ) + ", " + t.indexOf( ( T ) ) + ")", m.indexOf( ( T ) ) == t.indexOf( ( T ) ) );
+			assertTrue( "Error (" + level + "): lastIndexOf() divergence for " + T + "  (" + m.lastIndexOf( ( T ) ) + ", " + t.lastIndexOf( ( T ) ) + ")",
+					m.lastIndexOf( ( T ) ) == t.lastIndexOf( ( T ) ) );
+			assertTrue( "Error (" + level + "): polymorphic indexOf() divergence for " + T + "  (" + m.indexOf( T ) + ", " + t.indexOf( ( T ) ) + ")", m.indexOf( T ) == t.indexOf( ( T ) ) );
+			assertTrue( "Error (" + level + "): polymorphic lastIndexOf() divergence for " + T + "  (" + m.lastIndexOf( T ) + ", " + t.lastIndexOf( ( T ) ) + ")",
+					m.lastIndexOf( T ) == t.lastIndexOf( ( T ) ) );
+		}
+		/* Now we check cloning. */
+		if ( level == 0 ) {
+			assertTrue( "Error (" + level + "): m does not equal m.clone()", m.equals( ( (ObjectBigArrayBigList)m ).clone() ) );
+			assertTrue( "Error (" + level + "): m.clone() does not equal m", ( (ObjectBigArrayBigList)m ).clone().equals( m ) );
+		}
+		/* Now we play with constructors. */
+		assertTrue( "Error (" + level + "): m does not equal new ( type-specific Collection m )", m.equals( new ObjectBigArrayBigList( (ObjectCollection)m ) ) );
+		assertTrue( "Error (" + level + "): new ( type-specific nCollection m ) does not equal m", ( new ObjectBigArrayBigList( (ObjectCollection)m ) ).equals( m ) );
+		assertTrue( "Error (" + level + "): m does not equal new ( type-specific List m )", m.equals( new ObjectBigArrayBigList( m ) ) );
+		assertTrue( "Error (" + level + "): new ( type-specific List m ) does not equal m", ( new ObjectBigArrayBigList( m ) ).equals( m ) );
+		assertTrue( "Error (" + level + "): m does not equal new ( m.listIterator() )", m.equals( new ObjectBigArrayBigList( m.listIterator() ) ) );
+		assertTrue( "Error (" + level + "): new ( m.listIterator() ) does not equal m", ( new ObjectBigArrayBigList( m.listIterator() ) ).equals( m ) );
+		assertTrue( "Error (" + level + "): m does not equal new ( m.type_specific_iterator() )", m.equals( new ObjectBigArrayBigList( m.iterator() ) ) );
+		assertTrue( "Error (" + level + "): new ( m.type_specific_iterator() ) does not equal m", ( new ObjectBigArrayBigList( m.iterator() ) ).equals( m ) );
+		int h = m.hashCode();
+		/* Now we save and read m. */
+		ObjectBigList m2 = null;
+		try {
+			java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" );
+			java.io.OutputStream os = new java.io.FileOutputStream( ff );
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os );
+			oos.writeObject( m );
+			oos.close();
+			java.io.InputStream is = new java.io.FileInputStream( ff );
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is );
+			m2 = (ObjectBigList)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch ( Exception e ) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+		assertTrue( "Error (" + level + "): hashCode() changed after save/read", m2.hashCode() == h );
+		/* Now we check that m2 actually holds that data. */
+		assertTrue( "Error (" + level + "): ! m2.equals( t ) after save/read", m2.equals( t ) );
+		assertTrue( "Error (" + level + "): ! t.equals( m2 ) after save/read", t.equals( m2 ) );
+		/* Now we take out of m everything, and check that it is empty. */
+		for ( Iterator i = t.iterator(); i.hasNext(); )
+			m2.remove( i.next() );
+		assertTrue( "Error (" + level + "): m2 is not empty (as it should be)", m2.isEmpty() );
+		/* Now we play with iterators. */
+		{
+			ObjectBigListIterator i;
+			ObjectBigListIterator j;
+			i = m.listIterator();
+			j = t.listIterator();
+			for ( int k = 0; k < 2 * n; k++ ) {
+				assertTrue( "Error (" + level + "): divergence in hasNext()", i.hasNext() == j.hasNext() );
+				assertTrue( "Error (" + level + "): divergence in hasPrevious()", i.hasPrevious() == j.hasPrevious() );
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					assertTrue( "Error (" + level + "): divergence in next()", i.next().equals( j.next() ) );
+					if ( r.nextFloat() < 0.2 ) {
+						i.remove();
+						j.remove();
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						Object T = genKey();
+						i.set( T );
+						j.set( ( T ) );
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						Object T = genKey();
+						i.add( T );
+						j.add( ( T ) );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					assertTrue( "Error (" + level + "): divergence in previous()", i.previous().equals( j.previous() ) );
+					if ( r.nextFloat() < 0.2 ) {
+						i.remove();
+						j.remove();
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						Object T = genKey();
+						i.set( T );
+						j.set( ( T ) );
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						Object T = genKey();
+						i.add( T );
+						j.add( ( T ) );
+					}
+				}
+				assertTrue( "Error (" + level + "): divergence in nextIndex()", i.nextIndex() == j.nextIndex() );
+				assertTrue( "Error (" + level + "): divergence in previousIndex()", i.previousIndex() == j.previousIndex() );
+			}
+		}
+		{
+			Object I, J;
+			int from = r.nextInt( m.size() + 1 );
+			ObjectBigListIterator i;
+			ObjectBigListIterator j;
+			i = m.listIterator( from );
+			j = t.listIterator( from );
+			for ( int k = 0; k < 2 * n; k++ ) {
+				assertTrue( "Error (" + level + "): divergence in hasNext() (iterator with starting point " + from + ")", i.hasNext() == j.hasNext() );
+				assertTrue( "Error (" + level + "): divergence in hasPrevious() (iterator with starting point " + from + ")", i.hasPrevious() == j.hasPrevious() );
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					I = i.next();
+					J = j.next();
+					assertTrue( "Error (" + level + "): divergence in next() (" + I + ", " + J + ", iterator with starting point " + from + ")", I.equals( J ) );
+					// System.err.println("Done next " + I + " " + J + "  " + badPrevious);
+					if ( r.nextFloat() < 0.2 ) {
+						// System.err.println("Removing in next");
+						i.remove();
+						j.remove();
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						Object T = genKey();
+						i.set( T );
+						j.set( ( T ) );
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						Object T = genKey();
+						i.add( T );
+						j.add( ( T ) );
+					}
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					I = i.previous();
+					J = j.previous();
+					assertTrue( "Error (" + level + "): divergence in previous() (" + I + ", " + J + ", iterator with starting point " + from + ")", I.equals( J ) );
+					if ( r.nextFloat() < 0.2 ) {
+						// System.err.println("Removing in prev");
+						i.remove();
+						j.remove();
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						Object T = genKey();
+						i.set( T );
+						j.set( ( T ) );
+					}
+					else if ( r.nextFloat() < 0.2 ) {
+						Object T = genKey();
+						i.add( T );
+						j.add( ( T ) );
+					}
+				}
+			}
+		}
+		/* Now we check that m actually holds that data. */
+		assertTrue( "Error (" + level + "): ! m.equals( t ) after iteration", m.equals( t ) );
+		assertTrue( "Error (" + level + "): ! t.equals( m ) after iteration", t.equals( m ) );
+		/* Now we select a pair of keys and create a subset. */
+		if ( !m.isEmpty() ) {
+			int start = r.nextInt( m.size() );
+			int end = start + r.nextInt( m.size() - start );
+			// System.err.println("Checking subList from " + start + " to " + end + " (level=" +
+			// (level+1) + ")..." );
+			testLists( m.subList( start, end ), t.subList( start, end ), n, level + 1 );
+			assertTrue( "Error (" + level + m + t + "): ! m.equals( t ) after subList", m.equals( t ) );
+			assertTrue( "Error (" + level + "): ! t.equals( m ) after subList", t.equals( m ) );
+		}
+		m.clear();
+		t.clear();
+		assertTrue( "Error (" + level + "): m is not empty after clear()", m.isEmpty() );
+	}
+
+	@SuppressWarnings({ "boxing", "unchecked" })
+	protected static void test( int n ) {
+		ObjectBigArrayBigList m = new ObjectBigArrayBigList();
+		ObjectBigList t = ObjectBigLists.asBigList( new ObjectArrayList() );
+		k = new Object[ n ];
+		nk = new Object[ n ];
+		kt = new Object[ n ];
+		nkt = new Object[ n ];
+		for ( int i = 0; i < n; i++ ) {
+			k[ i ] = kt[ i ] = genKey();
+			nk[ i ] = nkt[ i ] = genKey();
+		}
+		/* We add pairs to t. */
+		for ( int i = 0; i < n; i++ ) t.add( k[ i ] );
+		/* We add to m the same data */
+		m.addAll( t );
+		testLists( m, t, n, 0 );
+
+		// This tests all reflection-based methods.
+		m = ObjectBigArrayBigList.wrap( ObjectBigArrays.EMPTY_BIG_ARRAY );
+		t = ObjectBigLists.asBigList( new ObjectArrayList() );
+		/* We add pairs to t. */
+		for ( int i = 0; i < n; i++ ) t.add( k[ i ] );
+		/* We add to m the same data */
+		m.addAll( t );
+		testLists( m, t, n, 0 );
+		return;
+	}
+
+	@Test
+	public void test1() {
+		test( 1 );
+	}
+
+	@Test
+	public void test10() {
+		test( 10 );
+	}
+
+	@Test
+	public void test100() {
+		test( 100 );
+	}
+
+	@Ignore("Too long")
+	@Test
+	public void test1000() {
+		test( 1000 );
+	}
+}
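
The big-list operations driven by testLists() above (positional add/get/set, size64(), subList()) can be seen in isolation below. A minimal sketch using only methods the test calls; the class name BigListSketch and the sample strings are illustrative.

	import it.unimi.dsi.fastutil.objects.ObjectBigArrayBigList;
	import it.unimi.dsi.fastutil.objects.ObjectBigList;

	public class BigListSketch {
		public static void main(String[] args) {
			ObjectBigList<String> l = new ObjectBigArrayBigList<String>();
			l.add("a");
			l.add("b");
			l.add(1, "c");                        // positional add: [a, c, b]
			l.set(0, "z");                        // positional set: [z, c, b]
			System.out.println(l.size64());       // 3 -- big lists report their size as a long
			System.out.println(l.get(2));         // b
			System.out.println(l.subList(1, 3));  // [c, b], the view type testLists() recurses into
		}
	}
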
diff --git a/test/it/unimi/dsi/fastutil/objects/ObjectBigArraysTest.java b/test/it/unimi/dsi/fastutil/objects/ObjectBigArraysTest.java
new file mode 100644
index 0000000..2c8353a
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/objects/ObjectBigArraysTest.java
@@ -0,0 +1,115 @@
+package it.unimi.dsi.fastutil.objects;
+
+import static org.junit.Assert.*;
+import static it.unimi.dsi.fastutil.objects.ObjectBigArrays.set;
+import static it.unimi.dsi.fastutil.objects.ObjectBigArrays.get;
+
+import java.util.Arrays;
+
+import org.junit.Test;
+
+public class ObjectBigArraysTest {
+
+	@SuppressWarnings({ "unchecked", "boxing" })
+	@Test
+	public void testQuickSort() {
+		Integer[] s = new Integer[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+
+		Arrays.sort( s );
+		Integer[][] sorted = ObjectBigArrays.wrap( s.clone() );
+
+		Integer[][] a = ObjectBigArrays.wrap( s.clone()  );
+
+		ObjectBigArrays.quickSort( a );
+		assertArrayEquals( sorted, a );
+
+		ObjectBigArrays.quickSort( a );
+		assertArrayEquals( sorted, a );
+		
+		a = ObjectBigArrays.wrap( s.clone()  );
+		
+		ObjectBigArrays.quickSort( a, ObjectComparators.NATURAL_COMPARATOR );
+		assertArrayEquals( sorted, a );
+
+		ObjectBigArrays.quickSort( a, ObjectComparators.NATURAL_COMPARATOR );
+		assertArrayEquals( sorted, a );
+		
+	}
+
+	@SuppressWarnings("boxing")
+	private void testCopy( int n ) {
+		Object[][] a = ObjectBigArrays.newBigArray( n );
+		for ( int i = 0; i < n; i++ ) set( a, i, i );
+		ObjectBigArrays.copy( a, 0, a, 1, n - 2 );
+		assertEquals( 0, a[ 0 ][ 0 ] );
+		for ( int i = 0; i < n - 2; i++ ) assertEquals( i,  get( a, i + 1 ) );
+		for ( int i = 0; i < n; i++ ) set( a, i, i );
+		ObjectBigArrays.copy( a, 1, a, 0, n - 1 );
+		for ( int i = 0; i < n - 1; i++ ) assertEquals( i + 1, get( a, i ) );
+		for ( int i = 0; i < n; i++ ) set( a, i, i );
+		Integer[] b = new Integer[ n ];
+		for ( int i = 0; i < n; i++ ) b[ i ] = i;
+		assertArrayEquals( a, ObjectBigArrays.wrap( b ) );
+	}
+	
+	@Test
+	public void testCopy10() {
+		testCopy( 10 );
+	}
+
+	@Test
+	public void testCopy1000() {
+		testCopy( 1000 );
+	}
+
+	@Test
+	public void testCopy1000000() {
+		testCopy( 1000000 );
+	}
+
+	@SuppressWarnings({ "boxing", "unchecked" })
+	@Test
+	public void testBinarySearch() {
+		Integer[] a = new Integer[] { 25, 32, 1, 3, 2, 0, 40, 7, 13, 12, 11, 10, -1, -6, -18, 2000 };
+		
+		Arrays.sort( a );
+		Integer[][] b = ObjectBigArrays.wrap( a.clone() );
+
+		for( int i = -1; i < 20; i++ ) { 
+			assertEquals( "" + i, Arrays.binarySearch( a, i ), ObjectBigArrays.binarySearch( b, i ) );
+			assertEquals( "" + i, Arrays.binarySearch( a, i ), ObjectBigArrays.binarySearch( b, i, ObjectComparators.NATURAL_COMPARATOR ) );
+		}
+	
+		for( int i = -1; i < 20; i++ ) {
+			assertEquals( Arrays.binarySearch( a, 5, 13, i ), ObjectBigArrays.binarySearch( b, 5, 13, i ) );
+			assertEquals( Arrays.binarySearch( a, 5, 13, i ), ObjectBigArrays.binarySearch( b, 5, 13, i, ObjectComparators.NATURAL_COMPARATOR ) );
+		}
+	}
+
+	@SuppressWarnings("boxing")
+	@Test
+	public void testTrim() {
+		Integer[] a = new Integer[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		Integer[][] b = ObjectBigArrays.wrap( a.clone() );
+
+		for( int i = a.length; i-- != 0; ) {
+			Integer[][] t = ObjectBigArrays.trim( b, i );
+			final long l = ObjectBigArrays.length( t );
+			assertEquals( i, l );
+			for( int p = 0; p < l; p++ ) assertEquals( a[ p ], ObjectBigArrays.get( t, p ) );
+			
+		}
+	}
+
+	@SuppressWarnings("boxing")
+	@Test
+	public void testEquals() {
+		Integer[] a = new Integer[] { 2, 1, 5, 2, 1, 0, 9, 1, 4, 2, 4, 6, 8, 9, 10, 12, 1, 7 };
+		Integer[][] b = ObjectBigArrays.wrap( a.clone() );
+		Integer[][] c = ObjectBigArrays.wrap( a.clone() );
+
+		assertTrue( ObjectBigArrays.equals( b, c ) );
+		b[ 0 ][ 0 ] = 0;
+		assertFalse( ObjectBigArrays.equals( b, c ) );
+	}
+}
\ No newline at end of file
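
The static helpers exercised by ObjectBigArraysTest operate on a "big array", an array of arrays addressed through a long index. A minimal sketch limited to the calls that appear in the test (wrap, quickSort, length, get, set, binarySearch); the class name and sample values are illustrative.

	import it.unimi.dsi.fastutil.objects.ObjectBigArrays;

	public class BigArraysSketch {
		public static void main(String[] args) {
			Integer[][] a = ObjectBigArrays.wrap(new Integer[] { Integer.valueOf(3), Integer.valueOf(1), Integer.valueOf(2) });
			ObjectBigArrays.quickSort(a);                   // as in testQuickSort(): a is now {1, 2, 3}
			System.out.println(ObjectBigArrays.length(a));  // 3
			System.out.println(ObjectBigArrays.get(a, 0));  // 1
			ObjectBigArrays.set(a, 2, Integer.valueOf(5));  // a is now {1, 2, 5}
			System.out.println(ObjectBigArrays.binarySearch(a, Integer.valueOf(2)));  // 1, as in testBinarySearch()
		}
	}
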
diff --git a/test/it/unimi/dsi/fastutil/objects/ObjectOpenHashBigSetTest.java b/test/it/unimi/dsi/fastutil/objects/ObjectOpenHashBigSetTest.java
new file mode 100644
index 0000000..8507312
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/objects/ObjectOpenHashBigSetTest.java
@@ -0,0 +1,260 @@
+package it.unimi.dsi.fastutil.objects;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+import it.unimi.dsi.fastutil.Hash;
+import it.unimi.dsi.fastutil.booleans.BooleanBigArrays;
+
+import java.io.IOException;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+@SuppressWarnings("rawtypes")
+public class ObjectOpenHashBigSetTest {
+
+	@Test
+	@SuppressWarnings("boxing")
+	public void testStrangeRetainAllCase() {
+
+		ObjectArrayList<Integer> initialElements = ObjectArrayList.wrap(new Integer[] { 586, 940,
+				1086, 1110, 1168, 1184, 1185, 1191, 1196, 1229, 1237, 1241,
+				1277, 1282, 1284, 1299, 1308, 1309, 1310, 1314, 1328, 1360,
+				1366, 1370, 1378, 1388, 1392, 1402, 1406, 1411, 1426, 1437,
+				1455, 1476, 1489, 1513, 1533, 1538, 1540, 1541, 1543, 1547,
+				1548, 1551, 1557, 1568, 1575, 1577, 1582, 1583, 1584, 1588,
+				1591, 1592, 1601, 1610, 1618, 1620, 1633, 1635, 1653, 1654,
+				1655, 1660, 1661, 1665, 1674, 1686, 1688, 1693, 1700, 1705,
+				1717, 1720, 1732, 1739, 1740, 1745, 1746, 1752, 1754, 1756,
+				1765, 1766, 1767, 1771, 1772, 1781, 1789, 1790, 1793, 1801,
+				1806, 1823, 1825, 1827, 1828, 1829, 1831, 1832, 1837, 1839,
+				1844, 2962, 2969, 2974, 2990, 3019, 3023, 3029, 3030, 3052,
+				3072, 3074, 3075, 3093, 3109, 3110, 3115, 3116, 3125, 3137,
+				3142, 3156, 3160, 3176, 3180, 3188, 3193, 3198, 3207, 3209,
+				3210, 3213, 3214, 3221, 3225, 3230, 3231, 3236, 3240, 3247,
+				3261, 4824, 4825, 4834, 4845, 4852, 4858, 4859, 4867, 4871,
+				4883, 4886, 4887, 4905, 4907, 4911, 4920, 4923, 4924, 4925,
+				4934, 4942, 4953, 4957, 4965, 4973, 4976, 4980, 4982, 4990,
+				4993, 6938, 6949, 6953, 7010, 7012, 7034, 7037, 7049, 7076,
+				7094, 7379, 7384, 7388, 7394, 7414, 7419, 7458, 7459, 7466,
+				7467 });
+
+		ObjectArrayList<Integer> retainElements = ObjectArrayList.wrap(new Integer[] { 586 });
+
+		// Initialize both implementations with the same data
+		ObjectOpenHashBigSet<Integer> instance = new ObjectOpenHashBigSet<Integer>(initialElements);
+		ObjectRBTreeSet<Integer> referenceInstance = new ObjectRBTreeSet<Integer>(initialElements);
+
+		instance.retainAll(retainElements);
+		referenceInstance.retainAll(retainElements);
+
+		// Prints the correct result {586}
+		System.out.println("ref: " + referenceInstance);
+
+		// With the retainAll() bug present this printed {586, 7379}, which is clearly wrong
+		System.out.println("ohm: " + instance);
+
+		// Fails if the bug is present
+		assertEquals( referenceInstance, instance );
+	}	
+
+	private static java.util.Random r = new java.util.Random( 0 );
+
+	private static Object genKey() {
+		return Integer.toBinaryString( r.nextInt() );
+	}
+
+	private static void checkTable( ObjectOpenHashBigSet<String> s ) {
+		final Object[][] key = s.key;
+		final boolean[][] used = s.used;
+		assert ( s.n & -s.n ) == s.n : "Table length is not a power of two: " + s.n;
+		assert s.n == ObjectBigArrays.length( key );
+		assert s.n == BooleanBigArrays.length( used );
+		long n = s.n;
+		while ( n-- != 0 )
+			if ( BooleanBigArrays.get( used, n ) && !s.contains( ObjectBigArrays.get( key, n ) ) ) throw new AssertionError( "Hash table has key " + ObjectBigArrays.get( key, n )
+					+ " marked as occupied, but the key does not belong to the table" );
+		java.util.HashSet<Object> t = new java.util.HashSet<Object>();
+		for ( long i = s.size64(); i-- != 0; )
+			if ( BooleanBigArrays.get( used, i ) && !t.add( ObjectBigArrays.get( key, i ) ) ) throw new AssertionError( "Key " + ObjectBigArrays.get( key, i ) + " appears twice" );
+	}
+
+	private static void printProbes( ObjectOpenHashBigSet m ) {
+		long totProbes = 0;
+		double totSquareProbes = 0;
+		long maxProbes = 0;
+		final double f = (double)m.size / m.n;
+		for ( long i = 0, c = 0; i < m.n; i++ ) {
+			if ( BooleanBigArrays.get( m.used, i ) ) c++;
+			else {
+				if ( c != 0 ) {
+					final long p = ( c + 1 ) * ( c + 2 ) / 2;
+					totProbes += p;
+					totSquareProbes += (double)p * p;
+				}
+				maxProbes = Math.max( c, maxProbes );
+				c = 0;
+				totProbes++;
+				totSquareProbes++;
+			}
+		}
+		final double expected = (double)totProbes / m.n;
+		System.err.println( "Expected probes: " + (
+				3 * Math.sqrt( 3 ) * ( f / ( ( 1 - f ) * ( 1 - f ) ) ) + 4 / ( 9 * f ) - 1
+				) + "; actual: " + expected + "; stddev: " + Math.sqrt( totSquareProbes / m.n - expected * expected ) + "; max probes: " + maxProbes );
+	}
+
+	@SuppressWarnings("unchecked")
+	private static void test( int n, float f ) throws IOException, ClassNotFoundException {
+		int c;
+		ObjectOpenHashBigSet m = new ObjectOpenHashBigSet( Hash.DEFAULT_INITIAL_SIZE, f );
+		java.util.Set t = new java.util.HashSet();
+		/* First of all, we fill t with random data. */
+		for ( int i = 0; i < f * n; i++ )
+			t.add( ( genKey() ) );
+		/* Now we add to m the same data */
+		m.addAll( t );
+		assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) );
+		printProbes( m );
+		checkTable( m );
+		/* Now we check that m actually holds that data. */
+		for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on t)", m.contains( e ) );
+		}
+		/* Now we check that m actually holds that data, but iterating on m. */
+		c = 0;
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			c++;
+			assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on m)", t.contains( e ) );
+		}
+		assertEquals( "Error: m has only " + c + " keys instead of " + t.size() + " after insertion (iterating on m)", c, t.size() );
+		/*
+		 * Now we check that inquiries about random data give the same answer in m and t. For m we
+		 * use the polymorphic method.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			Object T = genKey();
+			assertTrue( "Error: divergence in keys between t and m (polymorphic method)", m.contains( T ) == t.contains( ( T ) ) );
+		}
+		/*
+		 * Again, we check that inquiries about random data give the same answer in m and t, but for
+		 * m we use the standard method.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			Object T = genKey();
+			assertTrue( "Error: divergence between t and m (standard method)", m.contains( ( T ) ) == t.contains( ( T ) ) );
+		}
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+		for ( int i = 0; i < 20 * n; i++ ) {
+			Object T = genKey();
+			assertTrue( "Error: divergence in add() between t and m", m.add( ( T ) ) == t.add( ( T ) ) );
+			T = genKey();
+			assertTrue( "Error: divergence in remove() between t and m", m.remove( ( T ) ) == t.remove( ( T ) ) );
+		}
+		assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) );
+		
+		checkTable( m );
+		printProbes( m );
+
+		/*
+		 * Now we check that m actually holds that data.
+		 */
+		for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertTrue( "Error: m and t differ on a key (" + e + ") after removal (iterating on t)", m.contains( e ) );
+		}
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertTrue( "Error: m and t differ on a key (" + e + ") after removal (iterating on m)", t.contains( e ) );
+		}
+		/* Now we make m into an array, make it again a set and check it is OK. */
+		Object a[] = m.toArray();
+		assertTrue( "Error: toArray() output (or array-based constructor) is not OK", new ObjectOpenHashBigSet( a ).equals( m ) );
+		/* Now we check cloning. */
+		assertTrue( "Error: m does not equal m.clone()", m.equals( m.clone() ) );
+		assertTrue( "Error: m.clone() does not equal m", m.clone().equals( m ) );
+		int h = m.hashCode();
+		/* Now we save and read m. */
+		java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" );
+		java.io.OutputStream os = new java.io.FileOutputStream( ff );
+		java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os );
+		oos.writeObject( m );
+		oos.close();
+		java.io.InputStream is = new java.io.FileInputStream( ff );
+		java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is );
+		m = (ObjectOpenHashBigSet)ois.readObject();
+		ois.close();
+		ff.delete();
+		assertEquals( "Error: hashCode() changed after save/read", h, m.hashCode() );
+		checkTable( m );
+		printProbes( m );
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertTrue( "Error: m and t differ on a key (" + e + ") after save/read", t.contains( e ) );
+		}
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+		for ( int i = 0; i < 20 * n; i++ ) {
+			Object T = genKey();
+			assertTrue( "Error: divergence in add() between t and m after save/read", m.add( ( T ) ) == t.add( ( T ) ) );
+			T = genKey();
+			assertTrue( "Error: divergence in remove() between t and m after save/read", m.remove( ( T ) ) == t.remove( ( T ) ) );
+		}
+		assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) ); 		
+		/*
+		 * Now we take out of m everything, and check that it is empty.
+		 */
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			i.next();
+			i.remove();
+		}
+		assertTrue( "Error: m is not empty (as it should be)", m.isEmpty() );
+		return;
+	}
+
+	@Test
+	public void test1() throws IOException, ClassNotFoundException {
+		test( 1, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1, Hash.FAST_LOAD_FACTOR );
+		test( 1, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test10() throws IOException, ClassNotFoundException {
+		test( 10, Hash.DEFAULT_LOAD_FACTOR );
+		test( 10, Hash.FAST_LOAD_FACTOR );
+		test( 10, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test100() throws IOException, ClassNotFoundException {
+		test( 100, Hash.DEFAULT_LOAD_FACTOR );
+		test( 100, Hash.FAST_LOAD_FACTOR );
+		test( 100, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Ignore("Too long")
+	@Test
+	public void test1000() throws IOException, ClassNotFoundException {
+		test( 1000, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1000, Hash.FAST_LOAD_FACTOR );
+		test( 1000, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+	
+	@Test
+	public void testGet() {
+		final ObjectOpenHashBigSet<String> s = new ObjectOpenHashBigSet<String>();
+		String a = "a";
+		assertTrue( s.add( a ) );
+		assertSame( a, s.get(  "a" ) );
+		assertNull( s.get(  "b" ) );
+	}
+}
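
The test above always builds the set with an explicit initial size and load factor from the Hash interface (the constants its test methods iterate over). A small sketch of that construction path, limited to methods the test itself calls; the class name and sample elements are illustrative.

	import it.unimi.dsi.fastutil.Hash;
	import it.unimi.dsi.fastutil.objects.ObjectOpenHashBigSet;

	public class BigSetSketch {
		public static void main(String[] args) {
			ObjectOpenHashBigSet<String> s =
					new ObjectOpenHashBigSet<String>(Hash.DEFAULT_INITIAL_SIZE, Hash.FAST_LOAD_FACTOR);
			s.add("a");
			s.add("b");
			System.out.println(s.contains("a"));  // true
			System.out.println(s.size64());       // 2 -- big sets report their size as a long
			s.remove("a");
			System.out.println(s.isEmpty());      // false: "b" is still there
		}
	}
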
diff --git a/test/it/unimi/dsi/fastutil/objects/ObjectOpenHashSetTest.java b/test/it/unimi/dsi/fastutil/objects/ObjectOpenHashSetTest.java
new file mode 100644
index 0000000..9ba2704
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/objects/ObjectOpenHashSetTest.java
@@ -0,0 +1,271 @@
+package it.unimi.dsi.fastutil.objects;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+import it.unimi.dsi.fastutil.Hash;
+import it.unimi.dsi.fastutil.ints.IntArrayList;
+import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
+import it.unimi.dsi.fastutil.ints.IntRBTreeSet;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+@SuppressWarnings("rawtypes")
+public class ObjectOpenHashSetTest {
+
+	
+	@Test
+	@SuppressWarnings("boxing")
+	public void testStrangeRetainAllCase() {
+
+		ObjectArrayList<Integer> initialElements = ObjectArrayList.wrap(new Integer[] { 586, 940,
+				1086, 1110, 1168, 1184, 1185, 1191, 1196, 1229, 1237, 1241,
+				1277, 1282, 1284, 1299, 1308, 1309, 1310, 1314, 1328, 1360,
+				1366, 1370, 1378, 1388, 1392, 1402, 1406, 1411, 1426, 1437,
+				1455, 1476, 1489, 1513, 1533, 1538, 1540, 1541, 1543, 1547,
+				1548, 1551, 1557, 1568, 1575, 1577, 1582, 1583, 1584, 1588,
+				1591, 1592, 1601, 1610, 1618, 1620, 1633, 1635, 1653, 1654,
+				1655, 1660, 1661, 1665, 1674, 1686, 1688, 1693, 1700, 1705,
+				1717, 1720, 1732, 1739, 1740, 1745, 1746, 1752, 1754, 1756,
+				1765, 1766, 1767, 1771, 1772, 1781, 1789, 1790, 1793, 1801,
+				1806, 1823, 1825, 1827, 1828, 1829, 1831, 1832, 1837, 1839,
+				1844, 2962, 2969, 2974, 2990, 3019, 3023, 3029, 3030, 3052,
+				3072, 3074, 3075, 3093, 3109, 3110, 3115, 3116, 3125, 3137,
+				3142, 3156, 3160, 3176, 3180, 3188, 3193, 3198, 3207, 3209,
+				3210, 3213, 3214, 3221, 3225, 3230, 3231, 3236, 3240, 3247,
+				3261, 4824, 4825, 4834, 4845, 4852, 4858, 4859, 4867, 4871,
+				4883, 4886, 4887, 4905, 4907, 4911, 4920, 4923, 4924, 4925,
+				4934, 4942, 4953, 4957, 4965, 4973, 4976, 4980, 4982, 4990,
+				4993, 6938, 6949, 6953, 7010, 7012, 7034, 7037, 7049, 7076,
+				7094, 7379, 7384, 7388, 7394, 7414, 7419, 7458, 7459, 7466,
+				7467 });
+
+		ObjectArrayList<Integer> retainElements = ObjectArrayList.wrap(new Integer[] { 586 });
+
+		// Initialize both implementations with the same data
+		ObjectOpenHashSet<Integer> instance = new ObjectOpenHashSet<Integer>(initialElements);
+		ObjectRBTreeSet<Integer> referenceInstance = new ObjectRBTreeSet<Integer>(initialElements);
+
+		instance.retainAll(retainElements);
+		referenceInstance.retainAll(retainElements);
+
+		// Prints the correct result {586}
+		System.out.println("ref: " + referenceInstance);
+
+		// With the retainAll() bug present this printed {586, 7379}, which is clearly wrong
+		System.out.println("ohm: " + instance);
+
+		// Fails if the bug is present
+		assertEquals( referenceInstance, instance );
+	}	
+
+	private static java.util.Random r = new java.util.Random( 0 );
+
+	private static Object genKey() {
+		return Integer.toBinaryString( r.nextInt() );
+	}
+
+	private static void checkTable( ObjectOpenHashSet<String> s ) {
+		final boolean[] used = s.used;
+		final Object[] key = s.key;
+		assert ( s.n & -s.n ) == s.n : "Table length is not a power of two: " + s.n;
+		assert s.n == ((Object[])s.key).length;
+		assert s.n == used.length;
+		int n = s.n;
+		while ( n-- != 0 )
+			if ( used[ n ] && !s.contains( key[ n ] ) ) throw new AssertionError( "Hash table has key " + key[ n ]
+					+ " marked as occupied, but the key does not belong to the table" );
+
+		java.util.HashSet<String> t = new java.util.HashSet<String>();
+		for ( int i = s.size(); i-- != 0; )
+			if ( used[ i ] && !t.add( (String)key[ i ] ) ) throw new AssertionError( "Key " + key[ i ] + " appears twice" );
+
+	}
+
+	private static void printProbes( ObjectOpenHashSet m ) {
+		long totProbes = 0;
+		double totSquareProbes = 0;
+		int maxProbes = 0;
+		final double f = (double)m.size / m.n;
+		for ( int i = 0, c = 0; i < m.n; i++ ) {
+			if ( m.used[ i ] ) c++;
+			else {
+				if ( c != 0 ) {
+					final long p = ( c + 1 ) * ( c + 2 ) / 2;
+					totProbes += p;
+					totSquareProbes += (double)p * p;
+				}
+				maxProbes = Math.max( c, maxProbes );
+				c = 0;
+				totProbes++;
+				totSquareProbes++;
+			}
+		}
+
+		final double expected = (double)totProbes / m.n;
+		System.err.println( "Expected probes: " + (
+				3 * Math.sqrt( 3 ) * ( f / ( ( 1 - f ) * ( 1 - f ) ) ) + 4 / ( 9 * f ) - 1
+				) + "; actual: " + expected + "; stddev: " + Math.sqrt( totSquareProbes / m.n - expected * expected ) + "; max probes: " + maxProbes );
+	}
+
+
+	@SuppressWarnings("unchecked")
+	private static void test( int n, float f ) {
+		int c;
+		ObjectOpenHashSet m = new ObjectOpenHashSet( Hash.DEFAULT_INITIAL_SIZE, f );
+		java.util.Set t = new java.util.HashSet();
+		/* First of all, we fill t with random data. */
+		for ( int i = 0; i < f * n; i++ )
+			t.add( ( genKey() ) );
+		/* Now we add to m the same data */
+		m.addAll( t );
+		assertTrue( "Error: !m.equals(t) after insertion", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after insertion", t.equals( m ) );
+		printProbes( m );
+		checkTable( m );
+		/* Now we check that m actually holds that data. */
+		for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on t)", m.contains( e ) );
+		}
+		/* Now we check that m actually holds that data, but iterating on m. */
+		c = 0;
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			c++;
+			assertTrue( "Error: m and t differ on a key (" + e + ") after insertion (iterating on m)", t.contains( e ) );
+		}
+		assertEquals( "Error: m has only " + c + " keys instead of " + t.size() + " after insertion (iterating on m)", c, t.size() );
+		/*
+		 * Now we check that inquiries about random data give the same answer in m and t. For m we
+		 * use the polymorphic method.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			Object T = genKey();
+			assertTrue( "Error: divergence in keys between t and m (polymorphic method)", m.contains( T ) == t.contains( ( T ) ) );
+		}
+		/*
+		 * Again, we check that inquiries about random data give the same answer in m and t, but for
+		 * m we use the standard method.
+		 */
+		for ( int i = 0; i < n; i++ ) {
+			Object T = genKey();
+			assertTrue( "Error: divergence between t and m (standard method)", m.contains( ( T ) ) == t.contains( ( T ) ) );
+		}
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+		for ( int i = 0; i < 20 * n; i++ ) {
+			Object T = genKey();
+			assertTrue( "Error: divergence in add() between t and m", m.add( ( T ) ) == t.add( ( T ) ) );
+			T = genKey();
+			assertTrue( "Error: divergence in remove() between t and m", m.remove( ( T ) ) == t.remove( ( T ) ) );
+		}
+		assertTrue( "Error: !m.equals(t) after removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after removal", t.equals( m ) );
+		
+		checkTable( m );
+		printProbes( m );
+
+		/*
+		 * Now we check that m actually holds that data.
+		 */
+		for ( java.util.Iterator i = t.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertTrue( "Error: m and t differ on a key (" + e + ") after removal (iterating on t)", m.contains( e ) );
+		}
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertTrue( "Error: m and t differ on a key (" + e + ") after removal (iterating on m)", t.contains( e ) );
+		}
+		/* Now we make m into an array, make it again a set and check it is OK. */
+		Object a[] = m.toArray();
+		assertTrue( "Error: toArray() output (or array-based constructor) is not OK", new ObjectOpenHashSet( a ).equals( m ) );
+		/* Now we check cloning. */
+		assertTrue( "Error: m does not equal m.clone()", m.equals( m.clone() ) );
+		assertTrue( "Error: m.clone() does not equal m", m.clone().equals( m ) );
+		int h = m.hashCode();
+		/* Now we save and read m. */
+		try {
+			java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" );
+			java.io.OutputStream os = new java.io.FileOutputStream( ff );
+			java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os );
+			oos.writeObject( m );
+			oos.close();
+			java.io.InputStream is = new java.io.FileInputStream( ff );
+			java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is );
+			m = (ObjectOpenHashSet)ois.readObject();
+			ois.close();
+			ff.delete();
+		}
+		catch ( Exception e ) {
+			e.printStackTrace();
+			System.exit( 1 );
+		}
+		assertEquals( "Error: hashCode() changed after save/read", h, m.hashCode() );
+		checkTable( m );
+		printProbes( m );
+		/* Now we check that m actually holds that data, but iterating on m. */
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			Object e = i.next();
+			assertTrue( "Error: m and t differ on a key (" + e + ") after save/read", t.contains( e ) );
+		}
+		/* Now we put and remove random data in m and t, checking that the result is the same. */
+		for ( int i = 0; i < 20 * n; i++ ) {
+			Object T = genKey();
+			assertTrue( "Error: divergence in add() between t and m after save/read", m.add( ( T ) ) == t.add( ( T ) ) );
+			T = genKey();
+			assertTrue( "Error: divergence in remove() between t and m after save/read", m.remove( ( T ) ) == t.remove( ( T ) ) );
+		}
+		assertTrue( "Error: !m.equals(t) after post-save/read removal", m.equals( t ) );
+		assertTrue( "Error: !t.equals(m) after post-save/read removal", t.equals( m ) ); 		
+		/*
+		 * Now we take out of m everything, and check that it is empty.
+		 */
+		for ( java.util.Iterator i = m.iterator(); i.hasNext(); ) {
+			i.next();
+			i.remove();
+		}
+		assertTrue( "Error: m is not empty (as it should be)", m.isEmpty() );
+		return;
+	}
+
+	@Test
+	public void test1() {
+		test( 1, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1, Hash.FAST_LOAD_FACTOR );
+		test( 1, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test10() {
+		test( 10, Hash.DEFAULT_LOAD_FACTOR );
+		test( 10, Hash.FAST_LOAD_FACTOR );
+		test( 10, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void test100() {
+		test( 100, Hash.DEFAULT_LOAD_FACTOR );
+		test( 100, Hash.FAST_LOAD_FACTOR );
+		test( 100, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Ignore("Too long")
+	@Test
+	public void test1000() {
+		test( 1000, Hash.DEFAULT_LOAD_FACTOR );
+		test( 1000, Hash.FAST_LOAD_FACTOR );
+		test( 1000, Hash.VERY_FAST_LOAD_FACTOR );
+	}
+
+	@Test
+	public void testGet() {
+		final ObjectOpenHashSet<String> s = new ObjectOpenHashSet<String>();
+		String a = "a";
+		assertTrue( s.add( a ) );
+		assertSame( a, s.get(  "a" ) );
+		assertNull( s.get(  "b" ) );
+	}
+}
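
testGet() relies on get() returning the instance actually stored in the table rather than the equal probe object. A short sketch of that behaviour, limited to calls appearing in the test; the class name and sample strings are illustrative.

	import it.unimi.dsi.fastutil.objects.ObjectOpenHashSet;

	public class GetSketch {
		public static void main(String[] args) {
			ObjectOpenHashSet<String> s = new ObjectOpenHashSet<String>();
			String stored = new String("key");
			s.add(stored);
			// get() hands back the element kept in the table, not the probe,
			// which is what assertSame() verifies in testGet().
			String probe = new String("key");
			System.out.println(s.get(probe) == stored);  // true
			System.out.println(s.get("missing"));        // null
		}
	}
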
diff --git a/test/it/unimi/dsi/fastutil/objects/Reference2ReferenceArrayMapTest.java b/test/it/unimi/dsi/fastutil/objects/Reference2ReferenceArrayMapTest.java
new file mode 100644
index 0000000..0045753
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/objects/Reference2ReferenceArrayMapTest.java
@@ -0,0 +1,99 @@
+package it.unimi.dsi.fastutil.objects;
+
+import it.unimi.dsi.fastutil.io.BinIO;
+import it.unimi.dsi.fastutil.objects.AbstractReference2ReferenceMap;
+import it.unimi.dsi.fastutil.objects.Object2ObjectArrayMap;
+import it.unimi.dsi.fastutil.objects.Reference2ReferenceArrayMap;
+import it.unimi.dsi.fastutil.objects.ReferenceOpenHashSet;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.ObjectOutputStream;
+import java.util.Map.Entry;
+
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class Reference2ReferenceArrayMapTest {
+
+	@Test
+	public void testMap() {
+		for( int i = 0; i <= 2; i++ ) {
+			final Reference2ReferenceArrayMap<Object,Object> m = i == 0 ? new Reference2ReferenceArrayMap<Object,Object>() : new Reference2ReferenceArrayMap<Object,Object>( i );
+			Integer one = new Integer( 1 ), two = new Integer( 2 ), three = new Integer( 3 );
+			assertEquals( null, m.put( one, one ) );
+			assertEquals( 1, m.size() );
+			assertTrue( m.containsKey( one ) );
+			assertTrue( m.containsValue( one ) );
+			assertEquals( null, m.put( two, two  ) );
+			assertTrue( m.containsKey( two ) );
+			assertTrue( m.containsValue( two ) );
+			assertEquals( 2, m.size() );
+			assertEquals( one, m.put( one, three ) );
+			assertTrue( m.containsValue( three ) );
+			assertEquals( null, m.remove( three ) );
+			assertEquals( null, m.put( three, three  ) );
+			assertTrue( m.containsKey( three ) );
+
+			assertEquals( new ReferenceOpenHashSet<Object>( new Object[] { one, two, three } ), new ReferenceOpenHashSet<Object>( m.keySet().iterator() ) );
+			assertEquals( new ReferenceOpenHashSet<Object>( new Object[] { three, two, three } ), new ReferenceOpenHashSet<Object>( m.values().iterator() ) );
+
+			for( Entry<Object, Object> e: m.entrySet() ) assertEquals( e.getValue(), m.get( e.getKey() ) );
+
+			assertTrue( m.entrySet().contains( new AbstractReference2ReferenceMap.BasicEntry<Object,Object>( one, three ) ) );
+			assertFalse( m.entrySet().contains( new AbstractReference2ReferenceMap.BasicEntry<Object,Object>( one, new Integer( 3 ) ) ) );
+			assertFalse( m.entrySet().contains( new AbstractReference2ReferenceMap.BasicEntry<Object,Object>( new Integer( 1 ), three ) ) );
+			assertTrue( m.entrySet().contains( new AbstractReference2ReferenceMap.BasicEntry<Object,Object>( two, two ) ) );
+			assertFalse( m.entrySet().contains( new AbstractReference2ReferenceMap.BasicEntry<Object,Object>( one, two ) ) );
+			assertFalse( m.entrySet().contains( new AbstractReference2ReferenceMap.BasicEntry<Object,Object>( two, one ) ) );
+			assertTrue( m.entrySet().contains( new AbstractReference2ReferenceMap.BasicEntry<Object,Object>( three, three ) ) );
+			assertFalse( m.entrySet().contains( new AbstractReference2ReferenceMap.BasicEntry<Object,Object>( new Integer( 3 ), two ) ) );
+
+			assertEquals( three, m.remove( three ) );
+			assertEquals( 2, m.size() );
+			assertEquals( three, m.remove( one ) );
+			assertEquals( 1, m.size() );
+			assertFalse( m.containsKey( one ) );
+			assertEquals( two, m.remove( two ) );
+			assertEquals( 0, m.size() );
+			assertFalse( m.containsKey( one ) );
+		}
+	}
+	
+	@Test
+	public void testClone() {
+		Reference2ReferenceArrayMap<Integer, Integer> m = new Reference2ReferenceArrayMap<Integer, Integer>();
+		assertEquals( m, m.clone() );
+		m.put( new Integer( 0 ), new Integer( 1 ) );
+		assertEquals( m, m.clone() );
+		m.put( new Integer( 0 ), new Integer( 2 ) );
+		assertEquals( m, m.clone() );
+		Integer one;
+		m.put( one = new Integer( 1 ), new Integer( 2 ) );
+		assertEquals( m, m.clone() );
+		m.remove( one );
+		assertEquals( m, m.clone() );
+	}
+
+	@Test
+	public void testSerialisation() throws IOException, ClassNotFoundException {
+		// We can't really test reference maps as equals() doesn't work
+		Object2ObjectArrayMap<Integer, Integer> m = new Object2ObjectArrayMap<Integer, Integer>();
+		ByteArrayOutputStream baos = new ByteArrayOutputStream();
+		ObjectOutputStream oos = new ObjectOutputStream( baos );
+		oos.writeObject( m );
+		oos.close();
+		assertEquals( m, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) );
+		
+		m.put( new Integer( 0 ), new Integer( 1 ) );
+		m.put( new Integer( 1 ), new Integer( 2 ) );
+
+		baos.reset();
+		oos = new ObjectOutputStream( baos );
+		oos.writeObject( m );
+		oos.close();
+		assertEquals( m, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) );
+	}
+}
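
As the serialisation test notes, reference maps compare keys by identity rather than by equals(), which is also what the entrySet() assertions above probe with freshly allocated Integers. A minimal sketch of that semantics, assuming only the Reference2ReferenceArrayMap methods used in the test; names and values are illustrative.

	import it.unimi.dsi.fastutil.objects.Reference2ReferenceArrayMap;

	public class ReferenceMapSketch {
		public static void main(String[] args) {
			Reference2ReferenceArrayMap<Object, Object> m = new Reference2ReferenceArrayMap<Object, Object>();
			Integer one = new Integer(1);
			m.put(one, "first");
			// Keys are compared by reference, so an equal but distinct Integer is not found.
			System.out.println(m.containsKey(one));             // true
			System.out.println(m.containsKey(new Integer(1)));  // false
		}
	}
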
diff --git a/test/it/unimi/dsi/fastutil/objects/ReferenceArraySetTest.java b/test/it/unimi/dsi/fastutil/objects/ReferenceArraySetTest.java
new file mode 100644
index 0000000..86fad02
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/objects/ReferenceArraySetTest.java
@@ -0,0 +1,87 @@
+package it.unimi.dsi.fastutil.objects;
+
+import it.unimi.dsi.fastutil.io.BinIO;
+import it.unimi.dsi.fastutil.objects.ObjectArraySet;
+import it.unimi.dsi.fastutil.objects.ReferenceArraySet;
+import it.unimi.dsi.fastutil.objects.ReferenceOpenHashSet;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.ObjectOutputStream;
+
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class ReferenceArraySetTest {
+
+	@Test
+	public void testSet() {
+		for( int i = 0; i <= 2; i++ ) {
+			final ReferenceArraySet<Object> s = i == 0 ? new ReferenceArraySet<Object>() : new ReferenceArraySet<Object>( 2 );
+			Integer one = new Integer( 1 ), two = new Integer( 2 ), three = new Integer( 3 );
+			assertTrue( s.add( one ) );
+			assertEquals( 1, s.size() );
+			assertTrue( s.contains( one ) );
+			assertFalse( s.contains( new Integer( 1 ) ) );
+			assertTrue( s.add(  two  ) );
+			assertTrue( s.contains( two ) );
+			assertFalse( s.contains( new Integer( 2 ) ) );
+			assertEquals( 2, s.size() );
+			assertFalse( s.add( one ) );
+			assertFalse( s.remove( three ) );
+			assertTrue( s.add( three ) );
+			assertEquals( 3, s.size() );
+			assertTrue( s.contains( one ) );
+			assertTrue( s.contains( two ) );
+			assertTrue( s.contains( three ) );
+			assertEquals( new ReferenceOpenHashSet<Object>( new Object[] { one, two, three } ), new ReferenceOpenHashSet<Object>( s.iterator() ) );
+			assertTrue( s.remove( three ) );
+			assertEquals( 2, s.size() );
+			assertTrue( s.remove( one ) );
+			assertEquals( 1, s.size() );
+			assertFalse( s.contains( one ) );
+			assertTrue( s.remove( two ) );
+			assertEquals( 0, s.size() );
+			assertFalse( s.contains( one ) );
+		}
+	}
+
+	@Test
+	public void testClone() {
+		ReferenceArraySet<Integer> s = new ReferenceArraySet<Integer>();
+		assertEquals( s, s.clone() );
+		Integer zero;
+		s.add( zero = new Integer( 0 ) );
+		assertEquals( s, s.clone() );
+		s.add( new Integer( 0 ) );
+		assertEquals( s, s.clone() );
+		s.add( new Integer( 1 ) );
+		assertEquals( s, s.clone() );
+		s.add( new Integer( 2 ) );
+		assertEquals( s, s.clone() );
+		s.remove( zero );
+		assertEquals( s, s.clone() );
+	}
+
+	@Test
+	public void testSerialisation() throws IOException, ClassNotFoundException {
+		// We can't really test reference maps as equals() doesn't work
+		ObjectArraySet<Integer> s = new ObjectArraySet<Integer>();
+		ByteArrayOutputStream baos = new ByteArrayOutputStream();
+		ObjectOutputStream oos = new ObjectOutputStream( baos );
+		oos.writeObject( s );
+		oos.close();
+		assertEquals( s, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) );
+		
+		s.add( new Integer( 0 ) );
+		s.add( new Integer( 1 ) );
+
+		baos.reset();
+		oos = new ObjectOutputStream( baos );
+		oos.writeObject( s );
+		oos.close();
+		assertEquals( s, BinIO.loadObject( new ByteArrayInputStream( baos.toByteArray() ) ) );
+	}
+}
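
Both testSerialisation() methods write with plain java.io serialization and read back through BinIO.loadObject(). The round trip in isolation looks like the sketch below, which uses only APIs appearing in the tests; the class name and sample elements are illustrative.

	import it.unimi.dsi.fastutil.io.BinIO;
	import it.unimi.dsi.fastutil.objects.ObjectArraySet;

	import java.io.ByteArrayInputStream;
	import java.io.ByteArrayOutputStream;
	import java.io.IOException;
	import java.io.ObjectOutputStream;

	public class SerialisationSketch {
		public static void main(String[] args) throws IOException, ClassNotFoundException {
			ObjectArraySet<Integer> s = new ObjectArraySet<Integer>();
			s.add(Integer.valueOf(0));
			s.add(Integer.valueOf(1));
			ByteArrayOutputStream baos = new ByteArrayOutputStream();
			ObjectOutputStream oos = new ObjectOutputStream(baos);
			oos.writeObject(s);
			oos.close();
			// loadObject() deserializes whatever was written with standard serialization.
			Object copy = BinIO.loadObject(new ByteArrayInputStream(baos.toByteArray()));
			System.out.println(s.equals(copy));  // true
		}
	}
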
diff --git a/test/it/unimi/dsi/fastutil/shorts/ShortArrayFrontCodedListTest.java b/test/it/unimi/dsi/fastutil/shorts/ShortArrayFrontCodedListTest.java
new file mode 100644
index 0000000..e7246ed
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/shorts/ShortArrayFrontCodedListTest.java
@@ -0,0 +1,125 @@
+package it.unimi.dsi.fastutil.shorts;
+
+import static org.junit.Assert.assertTrue;
+import it.unimi.dsi.fastutil.objects.ObjectListIterator;
+
+import java.io.IOException;
+
+import org.junit.Test;
+
+@SuppressWarnings({ "rawtypes", "unchecked" })
+public class ShortArrayFrontCodedListTest {
+
+
+	private static java.util.Random r = new java.util.Random( 0 );
+
+	private static short genKey() {
+		return (short)( r.nextInt() );
+	}
+
+	private static boolean contentEquals( java.util.List x, java.util.List y ) {
+		if ( x.size() != y.size() ) return false;
+		for ( int i = 0; i < x.size(); i++ )
+			if ( !java.util.Arrays.equals( (short[])x.get( i ), (short[])y.get( i ) ) ) return false;
+		return true;
+	}
+
+	private static int l[];
+
+	private static short[][] a;
+
+	private static void test( int n ) throws IOException, ClassNotFoundException {
+		l = new int[ n ];
+		a = new short[ n ][];
+		for ( int i = 0; i < n; i++ )
+			l[ i ] = (int)( Math.abs( r.nextGaussian() ) * 32 );
+		for ( int i = 0; i < n; i++ )
+			a[ i ] = new short[ l[ i ] ];
+		for ( int i = 0; i < n; i++ )
+			for ( int j = 0; j < l[ i ]; j++ )
+				a[ i ][ j ] = genKey();
+		ShortArrayFrontCodedList m = new ShortArrayFrontCodedList( it.unimi.dsi.fastutil.objects.ObjectIterators.wrap( a ), r.nextInt( 4 ) + 1 );
+		it.unimi.dsi.fastutil.objects.ObjectArrayList t = new it.unimi.dsi.fastutil.objects.ObjectArrayList( a );
+		// System.out.println(m);
+		// for( i = 0; i < t.size(); i++ )
+		// System.out.println(ARRAY_LIST.wrap((KEY_TYPE[])t.get(i)));
+		/* Now we check that m actually holds that data. */
+		assertTrue( "Error: m does not equal t at creation", contentEquals( m, t ) );
+		/* Now we check cloning. */
+		assertTrue( "Error: m does not equal m.clone()", contentEquals( m, m.clone() ) );
+		/* Now we play with iterators. */
+		{
+			ObjectListIterator i;
+			java.util.ListIterator j;
+			i = m.listIterator();
+			j = t.listIterator();
+			for ( int k = 0; k < 2 * n; k++ ) {
+				assertTrue( "Error: divergence in hasNext()", i.hasNext() == j.hasNext() );
+				assertTrue( "Error: divergence in hasPrevious()", i.hasPrevious() == j.hasPrevious() );
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					assertTrue( "Error: divergence in next()", java.util.Arrays.equals( (short[])i.next(), (short[])j.next() ) );
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					assertTrue( "Error: divergence in previous()", java.util.Arrays.equals( (short[])i.previous(), (short[])j.previous() ) );
+				}
+				assertTrue( "Error: divergence in nextIndex()", i.nextIndex() == j.nextIndex() );
+				assertTrue( "Error: divergence in previousIndex()", i.previousIndex() == j.previousIndex() );
+			}
+		}
+		{
+			int from = r.nextInt( m.size() + 1 );
+			ObjectListIterator i;
+			java.util.ListIterator j;
+			i = m.listIterator( from );
+			j = t.listIterator( from );
+			for ( int k = 0; k < 2 * n; k++ ) {
+				assertTrue( "Error: divergence in hasNext() (iterator with starting point " + from + ")", i.hasNext() == j.hasNext() );
+				assertTrue( "Error: divergence in hasPrevious() (iterator with starting point " + from + ")", i.hasPrevious() == j.hasPrevious() );
+				if ( r.nextFloat() < .8 && i.hasNext() ) {
+					assertTrue( "Error: divergence in next() (iterator with starting point " + from + ")", java.util.Arrays.equals( (short[])i.next(), (short[])j.next() ) );
+					// System.err.println("Done next " + I + " " + J + "  " + badPrevious);
+				}
+				else if ( r.nextFloat() < .2 && i.hasPrevious() ) {
+					assertTrue( "Error: divergence in previous() (iterator with starting point " + from + ")", java.util.Arrays.equals( (short[])i.previous(), (short[])j.previous() ) );
+				}
+			}
+		}
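+		/* Now we check serialization: m is written out, read back and compared with t. */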
+		java.io.File ff = new java.io.File( "it.unimi.dsi.fastutil.test" );
+		java.io.OutputStream os = new java.io.FileOutputStream( ff );
+		java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( os );
+		oos.writeObject( m );
+		oos.close();
+		java.io.InputStream is = new java.io.FileInputStream( ff );
+		java.io.ObjectInputStream ois = new java.io.ObjectInputStream( is );
+		m = (ShortArrayFrontCodedList)ois.readObject();
+		ois.close();
+		ff.delete();
+		assertTrue( "Error: m does not equal t after save/read", contentEquals( m, t ) );
+		return;
+	}
+
+	@Test
+	public void test1() throws IOException, ClassNotFoundException {
+		test( 1 );
+	}
+
+	@Test
+	public void test10() throws IOException, ClassNotFoundException {
+		test( 10 );
+	}
+
+	@Test
+	public void test100() throws IOException, ClassNotFoundException {
+		test( 100 );
+	}
+
+	@Test
+	public void test1000() throws IOException, ClassNotFoundException {
+		test( 1000 );
+	}
+
+	@Test
+	public void test10000() throws IOException, ClassNotFoundException {
+		test( 10000 );
+	}
+}
diff --git a/test/it/unimi/dsi/fastutil/shorts/ShortArraysTest.java b/test/it/unimi/dsi/fastutil/shorts/ShortArraysTest.java
new file mode 100644
index 0000000..c583a25
--- /dev/null
+++ b/test/it/unimi/dsi/fastutil/shorts/ShortArraysTest.java
@@ -0,0 +1,141 @@
+package it.unimi.dsi.fastutil.shorts;
+
+import static org.junit.Assert.assertTrue;
+import java.util.Random;
+
+import org.junit.Test;
+
+public class ShortArraysTest {
+	
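+	/* Builds the array { 0, 1, ..., n - 1 }, cast to shorts; the tests below shuffle it to obtain a random permutation. */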
+	private static short[] castIdentity( int n ) {
+		final short[] a = new short[ n ];
+		while( n-- != 0 ) a[ n ] = (short)n;
+		return a;
+	}
+
+
+	@Test
+	public void testRadixSort1() {
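+		// Single-array radix sort on a small fixed input, an input with negative values, a shuffled permutation and large random arrays.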
+		short[] t = { 2, 1, 0, 4 };
+		ShortArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+		
+		t = new short[] { 2, -1, 0, -4 };
+		ShortArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+		
+		t = ShortArrays.shuffle( castIdentity( 100 ), new Random( 0 ) );
+		ShortArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new short[ 100 ];
+		Random random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = (short)random.nextInt();
+		ShortArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new short[ 100000 ];
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = (short)random.nextInt();
+		ShortArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+
+		t = new short[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = t.length; i-- != 0; ) t[ i ] = (short)random.nextInt();
+		ShortArrays.radixSort( t );
+		for( int i = t.length - 1; i-- != 0; ) assertTrue( t[ i ] <= t[ i + 1 ] );
+	}
+
+	@Test
+	public void testRadixSort2() {
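+		/* Two-array variant: the assertions below check that the pairs <d[0][i], d[1][i]> end up in lexicographical order. */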
+		short[][] d = new short[ 2 ][];
+
+		d[ 0 ] = new short[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)( 3 - i % 3 );
+		d[ 1 ] = ShortArrays.shuffle( castIdentity( 10 ), new Random( 0 ) );
+		ShortArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+		
+		d[ 0 ] = new short[ 100000 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)( 100 - i % 100 );
+		d[ 1 ] = ShortArrays.shuffle( castIdentity( 100000 ), new Random( 6 ) );
+		ShortArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new short[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)( i % 3 - 2 );
+		Random random = new Random( 0 );
+		d[ 1 ] = new short[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = (short)random.nextInt();
+		ShortArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+		
+		d[ 0 ] = new short[ 100000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)random.nextInt();
+		d[ 1 ] = new short[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = (short)random.nextInt();
+		ShortArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new short[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)random.nextInt();
+		d[ 1 ] = new short[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = (short)random.nextInt();
+		ShortArrays.radixSort( d[ 0 ], d[ 1 ] );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+	}
+
+	@Test
+	public void testRadixSort() {
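+		/* Array-of-arrays variant: a one-row array must come out sorted, and for two rows the tuples <d[0][i], d[1][i]> must end up in lexicographical order. */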
+		short[][] t = { { 2, 1, 0, 4 } };
+		ShortArrays.radixSort( t );
+		for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] );
+		
+		t[ 0 ] = ShortArrays.shuffle( castIdentity( 100 ), new Random( 0 ) );
+		ShortArrays.radixSort( t );
+		for( int i = t[ 0 ].length - 1; i-- != 0; ) assertTrue( t[ 0 ][ i ] <= t[ 0 ][ i + 1 ] );
+
+		short[][] d = new short[ 2 ][];
+
+		d[ 0 ] = new short[ 10 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)( 3 - i % 3 );
+		d[ 1 ] = ShortArrays.shuffle( castIdentity( 10 ), new Random( 0 ) );
+		ShortArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		
+		d[ 0 ] = new short[ 100000 ];
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)( 100 - i % 100 );
+		d[ 1 ] = ShortArrays.shuffle( castIdentity( 100000 ), new Random( 6 ) );
+		ShortArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new short[ 10 ];
+		Random random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)random.nextInt();
+		d[ 1 ] = new short[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = (short)random.nextInt();
+		ShortArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		
+		d[ 0 ] = new short[ 100000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)random.nextInt();
+		d[ 1 ] = new short[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = (short)random.nextInt();
+		ShortArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+
+		d[ 0 ] = new short[ 10000000 ];
+		random = new Random( 0 );
+		for( int i = d[ 0 ].length; i-- != 0; ) d[ 0 ][ i ] = (short)random.nextInt();
+		d[ 1 ] = new short[ d[ 0 ].length ];
+		for( int i = d[ 1 ].length; i-- != 0; ) d[ 1 ][ i ] = (short)random.nextInt();
+		ShortArrays.radixSort( d );
+		for( int i = d[ 0 ].length - 1; i-- != 0; ) assertTrue( Integer.toString( i ) + ": <" + d[ 0 ][ i ] + ", " + d[ 1 ][ i ] + ">, <" + d[ 0 ][ i + 1 ] + ", " +  d[ 1 ][ i + 1 ] + ">", d[ 0 ][ i ] < d[ 0 ][ i + 1 ] || d[ 0 ][ i ] == d[ 0 ][ i + 1 ] && d[ 1 ][ i ] <= d[ 1 ][ i + 1 ] );
+	}
+}

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-java/libfastutil-java.git



More information about the pkg-java-commits mailing list